Mirror of https://github.com/dorny/test-reporter.git (synced 2026-02-01 19:05:23 -08:00)

Compare commits: main...feature/21 (3 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 12c7abe9ab | |
| | 3b5ad0231b | |
| | c89704a410 | |
.github/workflows/check-dist.yml (vendored, 2 lines changed)

@@ -46,7 +46,7 @@ jobs:
         id: diff

       # If index.js was different than expected, upload the expected version as an artifact
-      - uses: actions/upload-artifact@v6
+      - uses: actions/upload-artifact@v5
         if: ${{ failure() && steps.diff.conclusion == 'failure' }}
         with:
           name: dist
.github/workflows/ci.yml (vendored, 2 lines changed)

@@ -25,7 +25,7 @@ jobs:

       - name: Upload test results
         if: ${{ !cancelled() }}
-        uses: actions/upload-artifact@v6
+        uses: actions/upload-artifact@v5
         with:
           name: test-results
           path: __tests__/__results__/*.xml
.github/workflows/integration-tests-issue-217.yml (vendored, normal file, 320 lines added)
@@ -0,0 +1,320 @@
name: Integration Tests (#217) - fail-on-error/fail-on-empty

on:
  workflow_dispatch:
  push:
  pull_request:
    paths:
      - 'src/**'
      - 'dist/**'
      - 'action.yml'
      - '.github/workflows/integration-tests.yml'
      - '__tests__/fixtures/integration/**'

jobs:
  # ============================================
  # Scenario 1: Passing tests, fail-on-error=true
  # Expected: Step passes, conclusion=success
  # ============================================
  test-passing-fail-on-error-true:
    name: "Passing tests | fail-on-error=true"
    runs-on: ubuntu-slim
    outputs:
      conclusion: ${{ steps.report.outputs.conclusion }}
    steps:
      - uses: actions/checkout@v6

      - name: Run test reporter
        id: report
        uses: ./
        with:
          name: 'Integration Test - Passing (fail-on-error=true)'
          path: '__tests__/fixtures/integration/passing-tests.xml'
          reporter: java-junit
          fail-on-error: 'true'
          fail-on-empty: 'true'

      - name: Validate results
        run: |
          echo "=== Test Results ==="
          echo "Step outcome: success (would have failed otherwise)"
          echo "Conclusion: ${{ steps.report.outputs.conclusion }}"
          echo "Passed: ${{ steps.report.outputs.passed }}"
          echo "Failed: ${{ steps.report.outputs.failed }}"

          if [ "${{ steps.report.outputs.conclusion }}" != "success" ]; then
            echo "FAIL: Expected conclusion 'success' but got '${{ steps.report.outputs.conclusion }}'"
            exit 1
          fi
          echo "PASS: All validations passed"

  # ============================================
  # Scenario 2: Passing tests, fail-on-error=false
  # Expected: Step passes, conclusion=success
  # ============================================
  test-passing-fail-on-error-false:
    name: "Passing tests | fail-on-error=false"
    runs-on: ubuntu-slim
    outputs:
      conclusion: ${{ steps.report.outputs.conclusion }}
    steps:
      - uses: actions/checkout@v6

      - name: Run test reporter
        id: report
        uses: ./
        with:
          name: 'Integration Test - Passing (fail-on-error=false)'
          path: '__tests__/fixtures/integration/passing-tests.xml'
          reporter: java-junit
          fail-on-error: 'false'
          fail-on-empty: 'true'

      - name: Validate results
        run: |
          echo "=== Test Results ==="
          echo "Conclusion: ${{ steps.report.outputs.conclusion }}"

          if [ "${{ steps.report.outputs.conclusion }}" != "success" ]; then
            echo "FAIL: Expected conclusion 'success' but got '${{ steps.report.outputs.conclusion }}'"
            exit 1
          fi
          echo "PASS: All validations passed"

  # ============================================
  # Scenario 3: Failing tests, fail-on-error=true
  # Expected: Step FAILS, conclusion=failure
  # ============================================
  test-failing-fail-on-error-true:
    name: "Failing tests | fail-on-error=true"
    runs-on: ubuntu-slim
    outputs:
      conclusion: ${{ steps.report.outputs.conclusion }}
    steps:
      - uses: actions/checkout@v6

      - name: Run test reporter
        id: report
        continue-on-error: true
        uses: ./
        with:
          name: 'Integration Test - Failing (fail-on-error=true)'
          path: '__tests__/fixtures/integration/failing-tests.xml'
          reporter: java-junit
          fail-on-error: 'true'
          fail-on-empty: 'true'

      - name: Validate results
        run: |
          echo "=== Test Results ==="
          echo "Step outcome: ${{ steps.report.outcome }}"
          echo "Conclusion: ${{ steps.report.outputs.conclusion }}"
          echo "Failed count: ${{ steps.report.outputs.failed }}"

          # Step should fail
          if [ "${{ steps.report.outcome }}" != "failure" ]; then
            echo "FAIL: Expected step to fail but got '${{ steps.report.outcome }}'"
            exit 1
          fi

          # Conclusion should be failure
          if [ "${{ steps.report.outputs.conclusion }}" != "failure" ]; then
            echo "FAIL: Expected conclusion 'failure' but got '${{ steps.report.outputs.conclusion }}'"
            exit 1
          fi
          echo "PASS: All validations passed"

  # ============================================
  # Scenario 4: Failing tests, fail-on-error=false
  # Expected: Step passes, conclusion=failure
  # Regression test for issue #217
  # ============================================
  test-failing-fail-on-error-false:
    name: "Failing tests | fail-on-error=false [#217]"
    runs-on: ubuntu-slim
    outputs:
      conclusion: ${{ steps.report.outputs.conclusion }}
    steps:
      - uses: actions/checkout@v6

      - name: Run test reporter
        id: report
        continue-on-error: true
        uses: ./
        with:
          name: 'Integration Test - Failing (fail-on-error=false)'
          path: '__tests__/fixtures/integration/failing-tests.xml'
          reporter: java-junit
          fail-on-error: 'false'
          fail-on-empty: 'true'

      - name: Validate results
        run: |
          echo "=== Test Results ==="
          echo "Step outcome: ${{ steps.report.outcome }}"
          echo "Conclusion: ${{ steps.report.outputs.conclusion }}"
          echo "Failed count: ${{ steps.report.outputs.failed }}"

          # Step should pass (fail-on-error is false)
          if [ "${{ steps.report.outcome }}" != "success" ]; then
            echo "FAIL: Expected step to pass but got '${{ steps.report.outcome }}'"
            exit 1
          fi

          # Conclusion SHOULD be 'failure' because tests failed
          # Regression test for issue #217
          if [ "${{ steps.report.outputs.conclusion }}" != "failure" ]; then
            echo "========================================"
            echo "REGRESSION DETECTED (Issue #217)"
            echo "========================================"
            echo "Expected conclusion 'failure' but got '${{ steps.report.outputs.conclusion }}'"
            echo "The check conclusion should reflect test results,"
            echo "independent of the fail-on-error setting."
            echo "========================================"
            exit 1
          fi
          echo "PASS: All validations passed"

  # ============================================
  # Scenario 5: Empty results, fail-on-empty=true
  # Expected: Step FAILS
  # ============================================
  test-empty-fail-on-empty-true:
    name: "Empty results | fail-on-empty=true"
    runs-on: ubuntu-slim
    outputs:
      conclusion: ${{ steps.report.outputs.conclusion || 'N/A' }}
    steps:
      - uses: actions/checkout@v6

      - name: Run test reporter
        id: report
        continue-on-error: true
        uses: ./
        with:
          name: 'Integration Test - Empty (fail-on-empty=true)'
          path: '__tests__/fixtures/integration/nonexistent-*.xml'
          reporter: java-junit
          fail-on-error: 'true'
          fail-on-empty: 'true'

      - name: Validate results
        run: |
          echo "=== Test Results ==="
          echo "Step outcome: ${{ steps.report.outcome }}"

          # Step should fail (no files found)
          if [ "${{ steps.report.outcome }}" != "failure" ]; then
            echo "FAIL: Expected step to fail but got '${{ steps.report.outcome }}'"
            exit 1
          fi
          echo "PASS: Step correctly failed on empty results"

  # ============================================
  # Scenario 6: Empty results, fail-on-empty=false
  # Expected: Step passes, conclusion=success
  # ============================================
  test-empty-fail-on-empty-false:
    name: "Empty results | fail-on-empty=false"
    runs-on: ubuntu-slim
    outputs:
      conclusion: ${{ steps.report.outputs.conclusion || 'N/A' }}
    steps:
      - uses: actions/checkout@v6

      - name: Run test reporter
        id: report
        continue-on-error: true
        uses: ./
        with:
          name: 'Integration Test - Empty (fail-on-empty=false)'
          path: '__tests__/fixtures/integration/nonexistent-*.xml'
          reporter: java-junit
          fail-on-error: 'true'
          fail-on-empty: 'false'

      - name: Validate results
        run: |
          echo "=== Test Results ==="
          echo "Step outcome: ${{ steps.report.outcome }}"

          # Step should pass (fail-on-empty is false)
          if [ "${{ steps.report.outcome }}" != "success" ]; then
            echo "FAIL: Expected step to pass but got '${{ steps.report.outcome }}'"
            exit 1
          fi
          echo "PASS: Step correctly passed with empty results"

  # ============================================
  # Summary job to report overall status
  # ============================================
  summary:
    name: "Test Summary"
    needs:
      - test-passing-fail-on-error-true
      - test-passing-fail-on-error-false
      - test-failing-fail-on-error-true
      - test-failing-fail-on-error-false
      - test-empty-fail-on-empty-true
      - test-empty-fail-on-empty-false
    runs-on: ubuntu-slim
    if: always()
    steps:
      - name: Generate summary
        run: |
          # Helper function to convert result to emoji
          result_to_emoji() {
            case "$1" in
              success) echo "✅ Pass" ;;
              failure) echo "❌ Fail" ;;
              cancelled) echo "⚪ Cancelled" ;;
              skipped) echo "⏭️ Skipped" ;;
              *) echo "❓ Unknown" ;;
            esac
          }

          # Helper function to format conclusion
          conclusion_to_badge() {
            case "$1" in
              success) echo "🟢 success" ;;
              failure) echo "🔴 failure" ;;
              N/A) echo "⚫ N/A" ;;
              *) echo "⚪ $1" ;;
            esac
          }

          # Generate markdown summary
          cat >> $GITHUB_STEP_SUMMARY << 'EOF'
          # Integration Test Results

          ## fail-on-error / fail-on-empty Scenarios

          | Scenario | Test Results | fail-on-error | fail-on-empty | Expected | Conclusion | Result |
          |----------|--------------|---------------|---------------|----------|------------|--------|
          EOF

          echo "| 1 | All pass | \`true\` | \`true\` | Step: pass, Check: success | $(conclusion_to_badge "${{ needs.test-passing-fail-on-error-true.outputs.conclusion }}") | $(result_to_emoji "${{ needs.test-passing-fail-on-error-true.result }}") |" >> $GITHUB_STEP_SUMMARY
          echo "| 2 | All pass | \`false\` | \`true\` | Step: pass, Check: success | $(conclusion_to_badge "${{ needs.test-passing-fail-on-error-false.outputs.conclusion }}") | $(result_to_emoji "${{ needs.test-passing-fail-on-error-false.result }}") |" >> $GITHUB_STEP_SUMMARY
          echo "| 3 | Some fail | \`true\` | \`true\` | Step: fail, Check: failure | $(conclusion_to_badge "${{ needs.test-failing-fail-on-error-true.outputs.conclusion }}") | $(result_to_emoji "${{ needs.test-failing-fail-on-error-true.result }}") |" >> $GITHUB_STEP_SUMMARY
          echo "| 4 | Some fail | \`false\` | \`true\` | Step: pass, Check: failure | $(conclusion_to_badge "${{ needs.test-failing-fail-on-error-false.outputs.conclusion }}") | $(result_to_emoji "${{ needs.test-failing-fail-on-error-false.result }}") |" >> $GITHUB_STEP_SUMMARY
          echo "| 5 | Empty | \`true\` | \`true\` | Step: fail | $(conclusion_to_badge "${{ needs.test-empty-fail-on-empty-true.outputs.conclusion }}") | $(result_to_emoji "${{ needs.test-empty-fail-on-empty-true.result }}") |" >> $GITHUB_STEP_SUMMARY
          echo "| 6 | Empty | \`true\` | \`false\` | Step: pass | $(conclusion_to_badge "${{ needs.test-empty-fail-on-empty-false.outputs.conclusion }}") | $(result_to_emoji "${{ needs.test-empty-fail-on-empty-false.result }}") |" >> $GITHUB_STEP_SUMMARY

          cat >> $GITHUB_STEP_SUMMARY << 'EOF'

          ---

          > **Scenario 4** is a regression test for [issue #217](https://github.com/dorny/test-reporter/issues/217).
          > It verifies that `conclusion` output correctly reflects test failures, independent of `fail-on-error` setting.
          > When `fail-on-error=false`, the step should pass but `conclusion` should still be `failure` if tests failed.

          EOF

          # Also print to console
          echo "=== Integration Test Summary ==="
          echo "Scenario 1 (pass, fail-on-error=true): ${{ needs.test-passing-fail-on-error-true.result }}"
          echo "Scenario 2 (pass, fail-on-error=false): ${{ needs.test-passing-fail-on-error-false.result }}"
          echo "Scenario 3 (fail, fail-on-error=true): ${{ needs.test-failing-fail-on-error-true.result }}"
          echo "Scenario 4 (fail, fail-on-error=false): ${{ needs.test-failing-fail-on-error-false.result }} (regression test for #217)"
          echo "Scenario 5 (empty, fail-on-empty=true): ${{ needs.test-empty-fail-on-empty-true.result }}"
          echo "Scenario 6 (empty, fail-on-empty=false): ${{ needs.test-empty-fail-on-empty-false.result }}"
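The six scenarios above pin down the contract a downstream workflow can rely on: with `fail-on-error: 'false'` the reporter step itself succeeds, while the `conclusion`, `passed`, and `failed` outputs still reflect the real test status. A minimal consumer sketch built on that contract follows; the `dorny/test-reporter@v2` tag, job name, and result glob are assumptions for illustration, and the integration tests above run the local action via `uses: ./` instead.

```yaml
jobs:
  report:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6

      - name: Publish test results
        id: report
        uses: dorny/test-reporter@v2            # assumed published tag, not part of this diff
        with:
          name: 'CI tests'                      # illustrative check name
          path: '__tests__/__results__/*.xml'   # illustrative glob, mirroring ci.yml above
          reporter: java-junit
          fail-on-error: 'false'                # step stays green even when tests fail
          fail-on-empty: 'true'

      - name: Fail the build after other reporting steps
        if: ${{ steps.report.outputs.conclusion == 'failure' }}
        run: |
          echo "Tests failed: ${{ steps.report.outputs.failed }} failing test(s)"
          exit 1
```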
@@ -1,14 +1,5 @@
# Changelog

## 2.5.0
* Feature: Add Nette Tester support with `tester-junit` reporter https://github.com/dorny/test-reporter/pull/707
* Maintenance: Bump actions/upload-artifact from 5 to 6 https://github.com/dorny/test-reporter/pull/695

## 2.4.0
* Feature: Add PHPUnit support with JUnit XML dialect parser https://github.com/dorny/test-reporter/pull/422
* Feature: Add JUnit XML sample files and tests for validation https://github.com/dorny/test-reporter/pull/701
* Fix: Refactor deprecated `String.substr()` function to use `String.substring()` https://github.com/dorny/test-reporter/pull/704

## 2.3.0
* Feature: Add Python support with `python-xunit` reporter (pytest) https://github.com/dorny/test-reporter/pull/643
* Feature: Add pytest traceback parsing and `directory-mapping` option https://github.com/dorny/test-reporter/pull/238
README.md (23 lines changed)

@@ -20,7 +20,6 @@ This [Github Action](https://github.com/features/actions) displays test results
- Java / [JUnit](https://junit.org/)
- JavaScript / [JEST](https://jestjs.io/) / [Mocha](https://mochajs.org/)
- Python / [pytest](https://docs.pytest.org/en/stable/) / [unittest](https://docs.python.org/3/library/unittest.html)
- PHP / [PHPUnit](https://phpunit.de/) / [Nette Tester](https://tester.nette.org/)
- Ruby / [RSpec](https://rspec.info/)
- Swift / xUnit

@@ -148,7 +147,6 @@ jobs:
          # java-junit
          # jest-junit
          # mocha-json
          # phpunit-junit
          # python-xunit
          # rspec-json
          # swift-xunit
@@ -316,27 +314,6 @@ This is due to the fact Java stack traces don't contain a full path to the sourc
Some heuristic was necessary to figure out the mapping between the line in the stack trace and an actual source file.
</details>

<details>
<summary>phpunit-junit</summary>

[PHPUnit](https://phpunit.de/) can generate JUnit XML via CLI:
`phpunit --log-junit reports/phpunit-junit.xml`

</details>

<details>
<summary>tester-junit</summary>

[Nette Tester](https://tester.nette.org/) can generate JUnit XML via CLI:

```bash
tester -s -o junit tests/ > reports/tester-junit.xml
```

**Note:** Nette Tester's JUnit output doesn't include test suite names. The parser will use the report file name as the suite name and automatically group tests by directory structure.

</details>

<details>
<summary>jest-junit</summary>

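Tying the `tester-junit` section above back to the action itself, here is a hedged sketch of a job that generates the Nette Tester JUnit report and feeds it to this reporter. The runner label, paths, and the `dorny/test-reporter@v2` tag are illustrative assumptions, and the sketch assumes Nette Tester is already installed in the job.

```yaml
jobs:
  tester:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6

      - name: Run Nette Tester                # produces the JUnit XML, as in the README snippet above
        run: tester -s -o junit tests/ > reports/tester-junit.xml

      - name: Publish Nette Tester results
        uses: dorny/test-reporter@v2          # assumed published tag, not part of this diff
        if: ${{ !cancelled() }}               # publish even when the tester step failed
        with:
          name: 'Nette Tester'                # illustrative check name
          path: 'reports/tester-junit.xml'
          reporter: tester-junit
```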
@@ -1,23 +0,0 @@

|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/external/java/junit4-basic.xml](#user-content-r0)|5 ✅|1 ❌||16s|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/java/junit4-basic.xml</a>
**6** tests were completed in **16s** with **5** passed, **1** failed and **0** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[Tests.Authentication](#user-content-r0s0)|2 ✅|1 ❌||9s|
|[Tests.Registration](#user-content-r0s1)|3 ✅|||7s|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">Tests.Authentication</a>
```
✅ testCase7
✅ testCase8
❌ testCase9
	AssertionError: Assertion error message
```
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">Tests.Registration</a>
```
✅ testCase1
✅ testCase2
✅ testCase3
```
@@ -1,22 +0,0 @@

|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/external/java/junit4-complete.xml](#user-content-r0)|5 ✅|2 ❌|1 ⚪|16s|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/java/junit4-complete.xml</a>
**8** tests were completed in **16s** with **5** passed, **2** failed and **1** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[Tests.Registration](#user-content-r0s0)|5 ✅|2 ❌|1 ⚪|16s|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">Tests.Registration</a>
```
✅ testCase1
✅ testCase2
✅ testCase3
⚪ testCase4
❌ testCase5
	AssertionError: Expected value did not match.
❌ testCase6
	ArithmeticError: Division by zero.
✅ testCase7
✅ testCase8
```
@@ -1,30 +0,0 @@
|
||||

|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/external/phpunit/junit-basic.xml](#user-content-r0)|8 ✅|1 ❌||16s|
|
||||
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/phpunit/junit-basic.xml</a>
|
||||
**9** tests were completed in **16s** with **8** passed, **1** failed and **0** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[Tests.Authentication](#user-content-r0s0)|2 ✅|1 ❌||9s|
|
||||
|[Tests.Authentication.Login](#user-content-r0s1)|3 ✅|||4s|
|
||||
|[Tests.Registration](#user-content-r0s2)|3 ✅|||7s|
|
||||
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">Tests.Authentication</a>
|
||||
```
|
||||
✅ testCase7
|
||||
✅ testCase8
|
||||
❌ testCase9
|
||||
AssertionError: Assertion error message
|
||||
```
|
||||
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">Tests.Authentication.Login</a>
|
||||
```
|
||||
✅ testCase4
|
||||
✅ testCase5
|
||||
✅ testCase6
|
||||
```
|
||||
### ✅ <a id="user-content-r0s2" href="#user-content-r0s2">Tests.Registration</a>
|
||||
```
|
||||
✅ testCase1
|
||||
✅ testCase2
|
||||
✅ testCase3
|
||||
```
|
||||
@@ -1,88 +0,0 @@
|
||||

|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/external/phpunit/phpcheckstyle-phpunit.xml](#user-content-r0)|28 ✅|2 ❌||41ms|
|
||||
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/phpunit/phpcheckstyle-phpunit.xml</a>
|
||||
**30** tests were completed in **41ms** with **28** passed, **2** failed and **0** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[CommentsTest](#user-content-r0s0)|3 ✅|||7ms|
|
||||
|[DeprecationTest](#user-content-r0s1)|1 ✅|||1ms|
|
||||
|[GoodTest](#user-content-r0s2)|4 ✅|||5ms|
|
||||
|[IndentationTest](#user-content-r0s3)|8 ✅|||8ms|
|
||||
|[MetricsTest](#user-content-r0s4)|1 ✅|||4ms|
|
||||
|[NamingTest](#user-content-r0s5)|2 ✅|||3ms|
|
||||
|[OptimizationTest](#user-content-r0s6)|1 ✅|||1ms|
|
||||
|[OtherTest](#user-content-r0s7)|2 ✅|2 ❌||7ms|
|
||||
|[PHPTagsTest](#user-content-r0s8)|2 ✅|||1ms|
|
||||
|[ProhibitedTest](#user-content-r0s9)|1 ✅|||1ms|
|
||||
|[StrictCompareTest](#user-content-r0s10)|1 ✅|||2ms|
|
||||
|[UnusedTest](#user-content-r0s11)|2 ✅|||2ms|
|
||||
### ✅ <a id="user-content-r0s0" href="#user-content-r0s0">CommentsTest</a>
|
||||
```
|
||||
✅ testGoodDoc
|
||||
✅ testComments
|
||||
✅ testTODOs
|
||||
```
|
||||
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">DeprecationTest</a>
|
||||
```
|
||||
✅ testDeprecations
|
||||
```
|
||||
### ✅ <a id="user-content-r0s2" href="#user-content-r0s2">GoodTest</a>
|
||||
```
|
||||
✅ testGood
|
||||
✅ testDoWhile
|
||||
✅ testAnonymousFunction
|
||||
✅ testException
|
||||
```
|
||||
### ✅ <a id="user-content-r0s3" href="#user-content-r0s3">IndentationTest</a>
|
||||
```
|
||||
✅ testTabIndentation
|
||||
✅ testSpaceIndentation
|
||||
✅ testSpaceIndentationArray
|
||||
✅ testGoodSpaceIndentationArray
|
||||
✅ testGoodIndentationNewLine
|
||||
✅ testGoodIndentationSpaces
|
||||
✅ testBadSpaces
|
||||
✅ testBadSpaceAfterControl
|
||||
```
|
||||
### ✅ <a id="user-content-r0s4" href="#user-content-r0s4">MetricsTest</a>
|
||||
```
|
||||
✅ testMetrics
|
||||
```
|
||||
### ✅ <a id="user-content-r0s5" href="#user-content-r0s5">NamingTest</a>
|
||||
```
|
||||
✅ testNaming
|
||||
✅ testFunctionNaming
|
||||
```
|
||||
### ✅ <a id="user-content-r0s6" href="#user-content-r0s6">OptimizationTest</a>
|
||||
```
|
||||
✅ testTextAfterClosingTag
|
||||
```
|
||||
### ❌ <a id="user-content-r0s7" href="#user-content-r0s7">OtherTest</a>
|
||||
```
|
||||
❌ testOther
|
||||
PHPUnit\Framework\ExpectationFailedException
|
||||
❌ testException
|
||||
PHPUnit\Framework\ExpectationFailedException
|
||||
✅ testEmpty
|
||||
✅ testSwitchCaseNeedBreak
|
||||
```
|
||||
### ✅ <a id="user-content-r0s8" href="#user-content-r0s8">PHPTagsTest</a>
|
||||
```
|
||||
✅ testTextAfterClosingTag
|
||||
✅ testClosingTagNotNeeded
|
||||
```
|
||||
### ✅ <a id="user-content-r0s9" href="#user-content-r0s9">ProhibitedTest</a>
|
||||
```
|
||||
✅ testProhibited
|
||||
```
|
||||
### ✅ <a id="user-content-r0s10" href="#user-content-r0s10">StrictCompareTest</a>
|
||||
```
|
||||
✅ testStrictCompare
|
||||
```
|
||||
### ✅ <a id="user-content-r0s11" href="#user-content-r0s11">UnusedTest</a>
|
||||
```
|
||||
✅ testGoodUnused
|
||||
✅ testBadUnused
|
||||
```
|
||||
@@ -1,41 +0,0 @@
|
||||

|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/phpunit/phpunit.xml](#user-content-r0)|10 ✅|2 ❌||148ms|
|
||||
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/phpunit/phpunit.xml</a>
|
||||
**12** tests were completed in **148ms** with **10** passed, **2** failed and **0** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[CLI Arguments](#user-content-r0s0)||2 ❌||140ms|
|
||||
|[PHPUnit\Event\CollectingDispatcherTest](#user-content-r0s1)|2 ✅|||4ms|
|
||||
|[PHPUnit\Event\DeferringDispatcherTest](#user-content-r0s2)|4 ✅|||3ms|
|
||||
|[PHPUnit\Event\DirectDispatcherTest](#user-content-r0s3)|4 ✅|||1ms|
|
||||
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">CLI Arguments</a>
|
||||
```
|
||||
❌ targeting-traits-with-coversclass-attribute-is-deprecated.phpt
|
||||
PHPUnit\Framework\PhptAssertionFailedError
|
||||
❌ targeting-traits-with-usesclass-attribute-is-deprecated.phpt
|
||||
PHPUnit\Framework\PhptAssertionFailedError
|
||||
```
|
||||
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">PHPUnit\Event\CollectingDispatcherTest</a>
|
||||
```
|
||||
PHPUnit.Event.CollectingDispatcherTest
|
||||
✅ testHasNoCollectedEventsWhenFlushedImmediatelyAfterCreation
|
||||
✅ testCollectsDispatchedEventsUntilFlushed
|
||||
```
|
||||
### ✅ <a id="user-content-r0s2" href="#user-content-r0s2">PHPUnit\Event\DeferringDispatcherTest</a>
|
||||
```
|
||||
PHPUnit.Event.DeferringDispatcherTest
|
||||
✅ testCollectsEventsUntilFlush
|
||||
✅ testFlushesCollectedEvents
|
||||
✅ testSubscriberCanBeRegistered
|
||||
✅ testTracerCanBeRegistered
|
||||
```
|
||||
### ✅ <a id="user-content-r0s3" href="#user-content-r0s3">PHPUnit\Event\DirectDispatcherTest</a>
|
||||
```
|
||||
PHPUnit.Event.DirectDispatcherTest
|
||||
✅ testDispatchesEventToKnownSubscribers
|
||||
✅ testDispatchesEventToTracers
|
||||
✅ testRegisterRejectsUnknownSubscriber
|
||||
✅ testDispatchRejectsUnknownEventType
|
||||
```
|
||||
@@ -1,20 +0,0 @@
|
||||

|
||||
<details><summary>Expand for details</summary>
|
||||
|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/nette-tester/BootstrapFormRenderer-report.xml](#user-content-r0)|4 ✅|||300ms|
|
||||
## ✅ <a id="user-content-r0" href="#user-content-r0">fixtures/nette-tester/BootstrapFormRenderer-report.xml</a>
|
||||
**4** tests were completed in **300ms** with **4** passed, **0** failed and **0** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[BootstrapFormRenderer-report.xml](#user-content-r0s0)|4 ✅|||300ms|
|
||||
### ✅ <a id="user-content-r0s0" href="#user-content-r0s0">BootstrapFormRenderer-report.xml</a>
|
||||
```
|
||||
KdybyTests/BootstrapFormRenderer
|
||||
✅ BootstrapRendererTest.phpt::testRenderingBasics
|
||||
✅ BootstrapRendererTest.phpt::testRenderingIndividual
|
||||
✅ BootstrapRendererTest.phpt::testRenderingComponents
|
||||
✅ BootstrapRendererTest.phpt::testMultipleFormsInTemplate
|
||||
```
|
||||
</details>
|
||||
@@ -1,87 +0,0 @@
|
||||

|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/nette-tester/tester-v1.7-report.xml](#user-content-r0)|61 ✅|1 ❌|3 ⚪|2s|
|
||||
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/nette-tester/tester-v1.7-report.xml</a>
|
||||
**65** tests were completed in **2s** with **61** passed, **1** failed and **3** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[tester-v1.7-report.xml](#user-content-r0s0)|61 ✅|1 ❌|3 ⚪|2s|
|
||||
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">tester-v1.7-report.xml</a>
|
||||
```
|
||||
tests/Framework
|
||||
⚪ Dumper.toPhp.php7.phpt
|
||||
✅ Assert.contains.phpt
|
||||
✅ Assert.count.phpt
|
||||
✅ Assert.equal.phpt
|
||||
✅ Assert.equal.recursive.phpt::testSimple
|
||||
✅ Assert.equal.recursive.phpt::testMultiple
|
||||
✅ Assert.equal.recursive.phpt::testDeep
|
||||
✅ Assert.equal.recursive.phpt::testCross
|
||||
✅ Assert.equal.recursive.phpt::testThirdParty
|
||||
✅ Assert.error.phpt
|
||||
✅ Assert.exception.phpt
|
||||
✅ Assert.false.phpt
|
||||
✅ Assert.match.phpt
|
||||
✅ Assert.match.regexp.phpt
|
||||
✅ Assert.nan.phpt
|
||||
✅ Assert.noError.phpt
|
||||
✅ Assert.same.phpt
|
||||
✅ Assert.null.phpt
|
||||
✅ Assert.true.phpt
|
||||
✅ Assert.truthy.phpt
|
||||
✅ DataProvider.load.phpt
|
||||
✅ Assert.type.phpt
|
||||
✅ DataProvider.parseAnnotation.phpt
|
||||
✅ DataProvider.testQuery.phpt
|
||||
✅ DomQuery.css2Xpath.phpt
|
||||
✅ DomQuery.fromHtml.phpt
|
||||
✅ DomQuery.fromXml.phpt
|
||||
✅ Dumper.dumpException.phpt
|
||||
✅ Dumper.color.phpt
|
||||
✅ Dumper.toLine.phpt
|
||||
✅ Dumper.toPhp.recursion.phpt
|
||||
✅ Dumper.toPhp.phpt
|
||||
✅ FileMock.phpt
|
||||
✅ Helpers.escapeArg.phpt
|
||||
✅ Helpers.parseDocComment.phpt
|
||||
✅ TestCase.annotationThrows.phpt
|
||||
✅ TestCase.annotationThrows.setUp.tearDown.phpt
|
||||
✅ TestCase.annotationThrows.syntax.phpt
|
||||
✅ TestCase.basic.phpt
|
||||
✅ TestCase.dataProvider.generator.phpt
|
||||
✅ TestCase.dataProvider.phpt
|
||||
✅ TestCase.invalidMethods.phpt
|
||||
✅ TestCase.invalidProvider.phpt
|
||||
✅ TestCase.order.error.phpt
|
||||
✅ TestCase.order.errorMuted.phpt
|
||||
✅ TestCase.order.phpt
|
||||
✅ Prevent loop in error handling. The #268 regression. (TestCase.ownErrorHandler.phpt)
|
||||
tests/CodeCoverage
|
||||
⚪ Collector.start.phpt
|
||||
✅ PhpParser.parse.lines.phpt
|
||||
✅ PhpParser.parse.methods.phpt
|
||||
✅ CloverXMLGenerator.phpt
|
||||
✅ PhpParser.parse.edge.phpt
|
||||
✅ PhpParser.parse.lines-of-code.phpt
|
||||
✅ PhpParser.parse.namespaces.phpt
|
||||
tests/Runner
|
||||
✅ CommandLine.phpt
|
||||
⚪ HhvmPhpInterpreter.phpt
|
||||
✅ Runner.find-tests.phpt
|
||||
✅ Job.phpt
|
||||
✅ ZendPhpExecutable.phpt
|
||||
✅ Runner.multiple.phpt
|
||||
✅ Runner.edge.phpt
|
||||
✅ Runner.stop-on-fail.phpt
|
||||
❌ Runner.multiple-fails.phpt
|
||||
Failed: '... in /Users/izso/Developer/nette/tester/tests/Runner/multiple-fails/...' should match
|
||||
... '..., unexpected end of file in %a%testcase-syntax-error.phptx on line ...'
|
||||
|
||||
diff '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.expected' '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.actual'
|
||||
|
||||
in tests/Runner/Runner.multiple-fails.phpt(78) Tester\Assert::match()
|
||||
✅ Runner.annotations.phpt
|
||||
tests/RunnerOutput
|
||||
✅ JUnitPrinter.phpt
|
||||
```
|
||||
@@ -6878,153 +6878,3 @@ at java.lang.Thread.run(Thread.java:748)
|
||||
"totalTime": 2126531.0000000005,
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`java-junit tests report from testmo/junitxml basic example matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/external/java/junit4-basic.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase1",
|
||||
"result": "success",
|
||||
"time": 2113.871,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase2",
|
||||
"result": "success",
|
||||
"time": 1051,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase3",
|
||||
"result": "success",
|
||||
"time": 3441,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "Tests.Registration",
|
||||
"totalTime": 6605.870999999999,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase7",
|
||||
"result": "success",
|
||||
"time": 2508,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase8",
|
||||
"result": "success",
|
||||
"time": 1230.8159999999998,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": undefined,
|
||||
"line": undefined,
|
||||
"message": "AssertionError: Assertion error message",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "testCase9",
|
||||
"result": "failed",
|
||||
"time": 982,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "Tests.Authentication",
|
||||
"totalTime": 9076.816,
|
||||
},
|
||||
],
|
||||
"totalTime": 15682.687,
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`java-junit tests report from testmo/junitxml complete example matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/external/java/junit4-complete.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase1",
|
||||
"result": "success",
|
||||
"time": 2436,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase2",
|
||||
"result": "success",
|
||||
"time": 1534,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase3",
|
||||
"result": "success",
|
||||
"time": 822,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase4",
|
||||
"result": "skipped",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": undefined,
|
||||
"line": undefined,
|
||||
"message": "AssertionError: Expected value did not match.",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "testCase5",
|
||||
"result": "failed",
|
||||
"time": 2902.412,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": undefined,
|
||||
"line": undefined,
|
||||
"message": "ArithmeticError: Division by zero.",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "testCase6",
|
||||
"result": "failed",
|
||||
"time": 3819,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase7",
|
||||
"result": "success",
|
||||
"time": 2944,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase8",
|
||||
"result": "success",
|
||||
"time": 1625.275,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "Tests.Registration",
|
||||
"totalTime": 16082.687,
|
||||
},
|
||||
],
|
||||
"totalTime": 16082.687,
|
||||
}
|
||||
`;
|
||||
|
||||
@@ -1,628 +0,0 @@
|
||||
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
|
||||
|
||||
exports[`phpunit-junit tests report from junit-basic.xml matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/external/phpunit/junit-basic.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase1",
|
||||
"result": "success",
|
||||
"time": 2113.871,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase2",
|
||||
"result": "success",
|
||||
"time": 1051,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase3",
|
||||
"result": "success",
|
||||
"time": 3441,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "Tests.Registration",
|
||||
"totalTime": 6605.870999999999,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase4",
|
||||
"result": "success",
|
||||
"time": 2244,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase5",
|
||||
"result": "success",
|
||||
"time": 781,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase6",
|
||||
"result": "success",
|
||||
"time": 1331,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "Tests.Authentication.Login",
|
||||
"totalTime": 4356,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase7",
|
||||
"result": "success",
|
||||
"time": 2508,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase8",
|
||||
"result": "success",
|
||||
"time": 1230.8159999999998,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "",
|
||||
"line": undefined,
|
||||
"message": "AssertionError: Assertion error message",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "testCase9",
|
||||
"result": "failed",
|
||||
"time": 982,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "Tests.Authentication",
|
||||
"totalTime": 9076.816,
|
||||
},
|
||||
],
|
||||
"totalTime": 15682.687,
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`phpunit-junit tests report from phpcheckstyle-phpunit.xml matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/external/phpunit/phpcheckstyle-phpunit.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testGoodDoc",
|
||||
"result": "success",
|
||||
"time": 5.093,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testComments",
|
||||
"result": "success",
|
||||
"time": 0.921,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testTODOs",
|
||||
"result": "success",
|
||||
"time": 0.6880000000000001,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "CommentsTest",
|
||||
"totalTime": 6.702,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testDeprecations",
|
||||
"result": "success",
|
||||
"time": 0.9740000000000001,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "DeprecationTest",
|
||||
"totalTime": 0.9740000000000001,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testGood",
|
||||
"result": "success",
|
||||
"time": 2.6470000000000002,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testDoWhile",
|
||||
"result": "success",
|
||||
"time": 1.0219999999999998,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testAnonymousFunction",
|
||||
"result": "success",
|
||||
"time": 0.8,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testException",
|
||||
"result": "success",
|
||||
"time": 0.888,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "GoodTest",
|
||||
"totalTime": 5.357,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testTabIndentation",
|
||||
"result": "success",
|
||||
"time": 0.857,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testSpaceIndentation",
|
||||
"result": "success",
|
||||
"time": 0.929,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testSpaceIndentationArray",
|
||||
"result": "success",
|
||||
"time": 0.975,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testGoodSpaceIndentationArray",
|
||||
"result": "success",
|
||||
"time": 1.212,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testGoodIndentationNewLine",
|
||||
"result": "success",
|
||||
"time": 0.859,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testGoodIndentationSpaces",
|
||||
"result": "success",
|
||||
"time": 0.78,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testBadSpaces",
|
||||
"result": "success",
|
||||
"time": 1.1199999999999999,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testBadSpaceAfterControl",
|
||||
"result": "success",
|
||||
"time": 0.9219999999999999,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "IndentationTest",
|
||||
"totalTime": 7.654,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testMetrics",
|
||||
"result": "success",
|
||||
"time": 4.146999999999999,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "MetricsTest",
|
||||
"totalTime": 4.146999999999999,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testNaming",
|
||||
"result": "success",
|
||||
"time": 1.426,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testFunctionNaming",
|
||||
"result": "success",
|
||||
"time": 1.271,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "NamingTest",
|
||||
"totalTime": 2.697,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testTextAfterClosingTag",
|
||||
"result": "success",
|
||||
"time": 0.9940000000000001,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "OptimizationTest",
|
||||
"totalTime": 0.9940000000000001,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "OtherTest::testOther
|
||||
We expect 20 warnings
|
||||
Failed asserting that 19 matches expected 20.
|
||||
|
||||
/workspace/phpcheckstyle/test/OtherTest.php:24",
|
||||
"line": 12,
|
||||
"message": "PHPUnit\\Framework\\ExpectationFailedException",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "testOther",
|
||||
"result": "failed",
|
||||
"time": 5.2509999999999994,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "OtherTest::testException
|
||||
We expect 1 error
|
||||
Failed asserting that 0 matches expected 1.
|
||||
|
||||
/workspace/phpcheckstyle/test/OtherTest.php:40",
|
||||
"line": 31,
|
||||
"message": "PHPUnit\\Framework\\ExpectationFailedException",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "testException",
|
||||
"result": "failed",
|
||||
"time": 0.751,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testEmpty",
|
||||
"result": "success",
|
||||
"time": 0.42700000000000005,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testSwitchCaseNeedBreak",
|
||||
"result": "success",
|
||||
"time": 0.901,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "OtherTest",
|
||||
"totalTime": 7.329,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testTextAfterClosingTag",
|
||||
"result": "success",
|
||||
"time": 0.641,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testClosingTagNotNeeded",
|
||||
"result": "success",
|
||||
"time": 0.631,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "PHPTagsTest",
|
||||
"totalTime": 1.272,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testProhibited",
|
||||
"result": "success",
|
||||
"time": 0.9380000000000001,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "ProhibitedTest",
|
||||
"totalTime": 0.9380000000000001,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testStrictCompare",
|
||||
"result": "success",
|
||||
"time": 1.578,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "StrictCompareTest",
|
||||
"totalTime": 1.578,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testGoodUnused",
|
||||
"result": "success",
|
||||
"time": 0.94,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testBadUnused",
|
||||
"result": "success",
|
||||
"time": 0.895,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "UnusedTest",
|
||||
"totalTime": 1.835,
|
||||
},
|
||||
],
|
||||
"totalTime": undefined,
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`phpunit-junit tests report from phpunit test results matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/phpunit/phpunit.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "PHPUnit.Event.CollectingDispatcherTest",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testHasNoCollectedEventsWhenFlushedImmediatelyAfterCreation",
|
||||
"result": "success",
|
||||
"time": 1.441,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCollectsDispatchedEventsUntilFlushed",
|
||||
"result": "success",
|
||||
"time": 2.815,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "PHPUnit\\Event\\CollectingDispatcherTest",
|
||||
"totalTime": 4.256,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "PHPUnit.Event.DeferringDispatcherTest",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCollectsEventsUntilFlush",
|
||||
"result": "success",
|
||||
"time": 1.6720000000000002,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testFlushesCollectedEvents",
|
||||
"result": "success",
|
||||
"time": 0.661,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testSubscriberCanBeRegistered",
|
||||
"result": "success",
|
||||
"time": 0.33399999999999996,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testTracerCanBeRegistered",
|
||||
"result": "success",
|
||||
"time": 0.262,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "PHPUnit\\Event\\DeferringDispatcherTest",
|
||||
"totalTime": 2.928,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "PHPUnit.Event.DirectDispatcherTest",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testDispatchesEventToKnownSubscribers",
|
||||
"result": "success",
|
||||
"time": 0.17,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testDispatchesEventToTracers",
|
||||
"result": "success",
|
||||
"time": 0.248,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testRegisterRejectsUnknownSubscriber",
|
||||
"result": "success",
|
||||
"time": 0.257,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testDispatchRejectsUnknownEventType",
|
||||
"result": "success",
|
||||
"time": 0.11900000000000001,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "PHPUnit\\Event\\DirectDispatcherTest",
|
||||
"totalTime": 0.794,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "targeting-traits-with-coversclass-attribute-is-deprecated.phptFailed asserting that string matches format description.
|
||||
--- Expected
|
||||
+++ Actual
|
||||
@@ @@
|
||||
PHPUnit Started (PHPUnit 11.2-g0c2333363 using PHP 8.2.17 (cli) on Linux)
|
||||
Test Runner Configured
|
||||
Test Suite Loaded (1 test)
|
||||
+Test Runner Triggered Warning (No code coverage driver available)
|
||||
Event Facade Sealed
|
||||
Test Runner Started
|
||||
Test Suite Sorted
|
||||
@@ @@
|
||||
Test Preparation Started (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest::testSomething)
|
||||
Test Prepared (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest::testSomething)
|
||||
Test Passed (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest::testSomething)
|
||||
-Test Runner Triggered Deprecation (Targeting a trait such as PHPUnit\\TestFixture\\CoveredTrait with #[CoversClass] is deprecated, please refactor your test to use #[CoversTrait] instead.)
|
||||
Test Finished (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest::testSomething)
|
||||
Test Suite Finished (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest, 1 test)
|
||||
Test Runner Execution Finished
|
||||
Test Runner Finished
|
||||
-PHPUnit Finished (Shell Exit Code: 0)
|
||||
+PHPUnit Finished (Shell Exit Code: 1)
|
||||
|
||||
/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-coversclass-attribute-is-deprecated.phpt:28
|
||||
/home/matteo/OSS/phpunit/src/Framework/TestSuite.php:369
|
||||
/home/matteo/OSS/phpunit/src/TextUI/TestRunner.php:62
|
||||
/home/matteo/OSS/phpunit/src/TextUI/Application.php:200",
|
||||
"line": undefined,
|
||||
"message": "PHPUnit\\Framework\\PhptAssertionFailedError",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "targeting-traits-with-coversclass-attribute-is-deprecated.phpt",
|
||||
"result": "failed",
|
||||
"time": 68.151,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "targeting-traits-with-usesclass-attribute-is-deprecated.phptFailed asserting that string matches format description.
|
||||
--- Expected
|
||||
+++ Actual
|
||||
@@ @@
|
||||
PHPUnit Started (PHPUnit 11.2-g0c2333363 using PHP 8.2.17 (cli) on Linux)
|
||||
Test Runner Configured
|
||||
Test Suite Loaded (1 test)
|
||||
+Test Runner Triggered Warning (No code coverage driver available)
|
||||
Event Facade Sealed
|
||||
Test Runner Started
|
||||
Test Suite Sorted
|
||||
@@ @@
|
||||
Test Preparation Started (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest::testSomething)
|
||||
Test Prepared (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest::testSomething)
|
||||
Test Passed (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest::testSomething)
|
||||
-Test Runner Triggered Deprecation (Targeting a trait such as PHPUnit\\TestFixture\\CoveredTrait with #[UsesClass] is deprecated, please refactor your test to use #[UsesTrait] instead.)
|
||||
Test Finished (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest::testSomething)
|
||||
Test Suite Finished (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest, 1 test)
|
||||
Test Runner Execution Finished
|
||||
Test Runner Finished
|
||||
-PHPUnit Finished (Shell Exit Code: 0)
|
||||
+PHPUnit Finished (Shell Exit Code: 1)
|
||||
|
||||
/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-usesclass-attribute-is-deprecated.phpt:28
|
||||
/home/matteo/OSS/phpunit/src/Framework/TestSuite.php:369
|
||||
/home/matteo/OSS/phpunit/src/TextUI/TestRunner.php:62
|
||||
/home/matteo/OSS/phpunit/src/TextUI/Application.php:200",
|
||||
"line": undefined,
|
||||
"message": "PHPUnit\\Framework\\PhptAssertionFailedError",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "targeting-traits-with-usesclass-attribute-is-deprecated.phpt",
|
||||
"result": "failed",
|
||||
"time": 64.268,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "CLI Arguments",
|
||||
"totalTime": 140.397,
|
||||
},
|
||||
],
|
||||
"totalTime": undefined,
|
||||
}
|
||||
`;
|
||||
@@ -1,485 +0,0 @@
|
||||
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
|
||||
|
||||
exports[`tester-junit tests parses complex test names from BootstrapFormRenderer-report.xml 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/nette-tester/BootstrapFormRenderer-report.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "KdybyTests/BootstrapFormRenderer",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "BootstrapRendererTest.phpt::testRenderingBasics",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "BootstrapRendererTest.phpt::testRenderingIndividual",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "BootstrapRendererTest.phpt::testRenderingComponents",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "BootstrapRendererTest.phpt::testMultipleFormsInTemplate",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "BootstrapFormRenderer-report.xml",
|
||||
"totalTime": 300,
|
||||
},
|
||||
],
|
||||
"totalTime": undefined,
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`tester-junit tests report from tester-v1.7-report.xml matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/nette-tester/tester-v1.7-report.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "tests/Framework",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Dumper.toPhp.php7.phpt",
|
||||
"result": "skipped",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.contains.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.count.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.equal.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.equal.recursive.phpt::testSimple",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.equal.recursive.phpt::testMultiple",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.equal.recursive.phpt::testDeep",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.equal.recursive.phpt::testCross",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.equal.recursive.phpt::testThirdParty",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.error.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.exception.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.false.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.match.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.match.regexp.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.nan.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.noError.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.same.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.null.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.true.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.truthy.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "DataProvider.load.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.type.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "DataProvider.parseAnnotation.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "DataProvider.testQuery.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "DomQuery.css2Xpath.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "DomQuery.fromHtml.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "DomQuery.fromXml.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Dumper.dumpException.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Dumper.color.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Dumper.toLine.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Dumper.toPhp.recursion.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Dumper.toPhp.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "FileMock.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Helpers.escapeArg.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Helpers.parseDocComment.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.annotationThrows.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.annotationThrows.setUp.tearDown.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.annotationThrows.syntax.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.basic.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.dataProvider.generator.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.dataProvider.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.invalidMethods.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.invalidProvider.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.order.error.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.order.errorMuted.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.order.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Prevent loop in error handling. The #268 regression. (TestCase.ownErrorHandler.phpt)",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
TestGroupResult {
|
||||
"name": "tests/CodeCoverage",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Collector.start.phpt",
|
||||
"result": "skipped",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "PhpParser.parse.lines.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "PhpParser.parse.methods.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "CloverXMLGenerator.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "PhpParser.parse.edge.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "PhpParser.parse.lines-of-code.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "PhpParser.parse.namespaces.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
TestGroupResult {
|
||||
"name": "tests/Runner",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "CommandLine.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "HhvmPhpInterpreter.phpt",
|
||||
"result": "skipped",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Runner.find-tests.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Job.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "ZendPhpExecutable.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Runner.multiple.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Runner.edge.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Runner.stop-on-fail.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "Failed: '... in /Users/izso/Developer/nette/tester/tests/Runner/multiple-fails/...' should match
|
||||
... '..., unexpected end of file in %a%testcase-syntax-error.phptx on line ...'
|
||||
|
||||
diff '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.expected' '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.actual'
|
||||
|
||||
in tests/Runner/Runner.multiple-fails.phpt(78) Tester\\Assert::match()",
|
||||
"line": undefined,
|
||||
"message": "Failed: '... in /Users/izso/Developer/nette/tester/tests/Runner/multiple-fails/...' should match
|
||||
... '..., unexpected end of file in %a%testcase-syntax-error.phptx on line ...'
|
||||
|
||||
diff '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.expected' '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.actual'
|
||||
|
||||
in tests/Runner/Runner.multiple-fails.phpt(78) Tester\\Assert::match()",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "Runner.multiple-fails.phpt",
|
||||
"result": "failed",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Runner.annotations.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
TestGroupResult {
|
||||
"name": "tests/RunnerOutput",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "JUnitPrinter.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "tester-v1.7-report.xml",
|
||||
"totalTime": 2100,
|
||||
},
|
||||
],
|
||||
"totalTime": undefined,
|
||||
}
|
||||
`;
|
||||
@@ -1,2 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuites/>
@@ -1,31 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!--
|
||||
This is a basic JUnit-style XML example to highlight the basic structure.
|
||||
|
||||
Example by Testmo. Copyright 2023 Testmo GmbH. All rights reserved.
|
||||
Testmo test management software - https://www.testmo.com/
|
||||
-->
|
||||
<testsuites time="15.682687">
|
||||
<testsuite name="Tests.Registration" time="6.605871">
|
||||
<testcase name="testCase1" classname="Tests.Registration" time="2.113871" />
|
||||
<testcase name="testCase2" classname="Tests.Registration" time="1.051" />
|
||||
<testcase name="testCase3" classname="Tests.Registration" time="3.441" />
|
||||
</testsuite>
|
||||
<testsuite name="Tests.Authentication" time="9.076816">
|
||||
<!-- Java JUnit4 XML files do not nest <testsuite> elements -->
|
||||
<!--
|
||||
<testsuite name="Tests.Authentication.Login" time="4.356">
|
||||
<testcase name="testCase4" classname="Tests.Authentication.Login" time="2.244" />
|
||||
<testcase name="testCase5" classname="Tests.Authentication.Login" time="0.781" />
|
||||
<testcase name="testCase6" classname="Tests.Authentication.Login" time="1.331" />
|
||||
</testsuite>
|
||||
-->
|
||||
<testcase name="testCase7" classname="Tests.Authentication" time="2.508" />
|
||||
<testcase name="testCase8" classname="Tests.Authentication" time="1.230816" />
|
||||
<testcase name="testCase9" classname="Tests.Authentication" time="0.982">
|
||||
<failure message="Assertion error message" type="AssertionError">
|
||||
<!-- Call stack printed here -->
|
||||
</failure>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
141
__tests__/fixtures/external/java/junit4-complete.xml
vendored
@@ -1,141 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!--
|
||||
This is a JUnit-style XML example with commonly used tags and attributes.
|
||||
|
||||
Example by Testmo. Copyright 2023 Testmo GmbH. All rights reserved.
|
||||
Testmo test management software - https://www.testmo.com/
|
||||
-->
|
||||
|
||||
<!-- <testsuites> Usually the root element of a JUnit XML file. Some tools leave out
|
||||
the <testsuites> element if there is only a single top-level <testsuite> element (which
|
||||
is then used as the root element).
|
||||
|
||||
name Name of the entire test run
|
||||
tests Total number of tests in this file
|
||||
failures Total number of failed tests in this file
|
||||
errors Total number of errored tests in this file
|
||||
skipped Total number of skipped tests in this file
|
||||
assertions Total number of assertions for all tests in this file
|
||||
time Aggregated time of all tests in this file in seconds
|
||||
timestamp Date and time of when the test run was executed (in ISO 8601 format)
|
||||
-->
|
||||
<testsuites name="Test run" tests="8" failures="1" errors="1" skipped="1"
|
||||
assertions="20" time="16.082687" timestamp="2021-04-02T15:48:23">
|
||||
|
||||
<!-- <testsuite> A test suite usually represents a class, folder or group of tests.
|
||||
There can be many test suites in an XML file, and there can be test suites under other
|
||||
test suites.
|
||||
|
||||
name Name of the test suite (e.g. class name or folder name)
|
||||
tests Total number of tests in this suite
|
||||
failures Total number of failed tests in this suite
|
||||
errors Total number of errored tests in this suite
|
||||
skipped Total number of skipped tests in this suite
|
||||
assertions Total number of assertions for all tests in this suite
|
||||
time Aggregated time of all tests in this file in seconds
|
||||
timestamp Date and time of when the test suite was executed (in ISO 8601 format)
|
||||
file Source code file of this test suite
|
||||
-->
|
||||
<testsuite name="Tests.Registration" tests="8" failures="1" errors="1" skipped="1"
|
||||
assertions="20" time="16.082687" timestamp="2021-04-02T15:48:23"
|
||||
file="tests/registration.code">
|
||||
|
||||
<!-- <properties> Test suites (and test cases, see below) can have additional
|
||||
properties such as environment variables or version numbers. -->
|
||||
<properties>
|
||||
<!-- <property> Each property has a name and value. Some tools also support
|
||||
properties with text values instead of value attributes. -->
|
||||
<property name="version" value="1.774" />
|
||||
<property name="commit" value="ef7bebf" />
|
||||
<property name="browser" value="Google Chrome" />
|
||||
<property name="ci" value="https://github.com/actions/runs/1234" />
|
||||
<property name="config">
|
||||
Config line #1
|
||||
Config line #2
|
||||
Config line #3
|
||||
</property>
|
||||
</properties>
|
||||
|
||||
<!-- <system-out> Optionally data written to standard out for the suite.
|
||||
Also supported on a test case level, see below. -->
|
||||
<system-out>Data written to standard out.</system-out>
|
||||
|
||||
<!-- <system-err> Optionally data written to standard error for the suite.
|
||||
Also supported on a test case level, see below. -->
|
||||
<system-err>Data written to standard error.</system-err>
|
||||
|
||||
<!-- <testcase> There are one or more test cases in a test suite. A test passed
|
||||
if there isn't an additional result element (skipped, failure, error).
|
||||
|
||||
name The name of this test case, often the method name
|
||||
classname The name of the parent class/folder, often the same as the suite's name
|
||||
assertions Number of assertions checked during test case execution
|
||||
time Execution time of the test in seconds
|
||||
file Source code file of this test case
|
||||
line Source code line number of the start of this test case
|
||||
-->
|
||||
<testcase name="testCase1" classname="Tests.Registration" assertions="2"
|
||||
time="2.436" file="tests/registration.code" line="24" />
|
||||
<testcase name="testCase2" classname="Tests.Registration" assertions="6"
|
||||
time="1.534" file="tests/registration.code" line="62" />
|
||||
<testcase name="testCase3" classname="Tests.Registration" assertions="3"
|
||||
time="0.822" file="tests/registration.code" line="102" />
|
||||
|
||||
<!-- Example of a test case that was skipped -->
|
||||
<testcase name="testCase4" classname="Tests.Registration" assertions="0"
|
||||
time="0" file="tests/registration.code" line="164">
|
||||
<!-- <skipped> Indicates that the test was not executed. Can have an optional
|
||||
message describing why the test was skipped. -->
|
||||
<skipped message="Test was skipped." />
|
||||
</testcase>
|
||||
|
||||
<!-- Example of a test case that failed. -->
|
||||
<testcase name="testCase5" classname="Tests.Registration" assertions="2"
|
||||
time="2.902412" file="tests/registration.code" line="202">
|
||||
<!-- <failure> The test failed because one of the assertions/checks failed.
|
||||
Can have a message and failure type, often the assertion type or class. The text
|
||||
content of the element often includes the failure description or stack trace. -->
|
||||
<failure message="Expected value did not match." type="AssertionError">
|
||||
<!-- Failure description or stack trace -->
|
||||
</failure>
|
||||
</testcase>
|
||||
|
||||
<!-- Example of a test case that had errors. -->
|
||||
<testcase name="testCase6" classname="Tests.Registration" assertions="0"
|
||||
time="3.819" file="tests/registration.code" line="235">
|
||||
<!-- <error> The test had an unexpected error during execution. Can have a
|
||||
message and error type, often the exception type or class. The text
|
||||
content of the element often includes the error description or stack trace. -->
|
||||
<error message="Division by zero." type="ArithmeticError">
|
||||
<!-- Error description or stack trace -->
|
||||
</error>
|
||||
</testcase>
|
||||
|
||||
<!-- Example of a test case with outputs. -->
|
||||
<testcase name="testCase7" classname="Tests.Registration" assertions="3"
|
||||
time="2.944" file="tests/registration.code" line="287">
|
||||
<!-- <system-out> Optional data written to standard out for the test case. -->
|
||||
<system-out>Data written to standard out.</system-out>
|
||||
|
||||
<!-- <system-err> Optional data written to standard error for the test case. -->
|
||||
<system-err>Data written to standard error.</system-err>
|
||||
</testcase>
|
||||
|
||||
<!-- Example of a test case with properties -->
|
||||
<testcase name="testCase8" classname="Tests.Registration" assertions="4"
|
||||
time="1.625275" file="tests/registration.code" line="302">
|
||||
<!-- <properties> Some tools also support properties for test cases. -->
|
||||
<properties>
|
||||
<property name="priority" value="high" />
|
||||
<property name="language" value="english" />
|
||||
<property name="author" value="Adrian" />
|
||||
<property name="attachment" value="screenshots/dashboard.png" />
|
||||
<property name="attachment" value="screenshots/users.png" />
|
||||
<property name="description">
|
||||
This text describes the purpose of this test case and provides
|
||||
an overview of what the test does and how it works.
|
||||
</property>
|
||||
</properties>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
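The annotated fixture above doubles as a walkthrough of the JUnit XML format: a <testcase> counts as passed unless it carries a <skipped>, <failure> or <error> child, and suites may nest. As a rough illustration of those semantics only (not the reporter's actual parser; the choice of fast-xml-parser and the summarizeJUnit helper are assumptions for this sketch), a tally over such a report could look like this:

```ts
import {XMLParser} from 'fast-xml-parser'

interface Summary {
  total: number
  passed: number
  failed: number
  skipped: number
}

// Normalize the "single element vs array" shape the parser produces
function asArray<T>(value: T | T[] | undefined): T[] {
  if (value === undefined) return []
  return Array.isArray(value) ? value : [value]
}

// Hypothetical helper: walks <testsuites>/<testsuite>/<testcase> and tallies
// results the way the annotated example above describes them.
export function summarizeJUnit(xml: string): Summary {
  const parser = new XMLParser({ignoreAttributes: false, attributeNamePrefix: ''})
  const doc = parser.parse(xml)
  const root = doc.testsuites ?? doc.testsuite
  const suites = asArray(doc.testsuites ? root.testsuite : root)

  const summary: Summary = {total: 0, passed: 0, failed: 0, skipped: 0}
  const visit = (suite: any): void => {
    for (const tc of asArray(suite.testcase)) {
      summary.total++
      if (tc.skipped !== undefined) summary.skipped++
      else if (tc.failure !== undefined || tc.error !== undefined) summary.failed++
      else summary.passed++
    }
    // Some tools nest <testsuite> elements, so recurse into children as well
    for (const nested of asArray(suite.testsuite)) visit(nested)
  }
  for (const suite of suites) visit(suite)
  return summary
}
```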
@@ -1,28 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!--
|
||||
This is a basic JUnit-style XML example to highlight the basic structure.
|
||||
|
||||
Example by Testmo. Copyright 2023 Testmo GmbH. All rights reserved.
|
||||
Testmo test management software - https://www.testmo.com/
|
||||
-->
|
||||
<testsuites time="15.682687">
|
||||
<testsuite name="Tests.Registration" time="6.605871">
|
||||
<testcase name="testCase1" classname="Tests.Registration" time="2.113871" />
|
||||
<testcase name="testCase2" classname="Tests.Registration" time="1.051" />
|
||||
<testcase name="testCase3" classname="Tests.Registration" time="3.441" />
|
||||
</testsuite>
|
||||
<testsuite name="Tests.Authentication" time="9.076816">
|
||||
<testsuite name="Tests.Authentication.Login" time="4.356">
|
||||
<testcase name="testCase4" classname="Tests.Authentication.Login" time="2.244" />
|
||||
<testcase name="testCase5" classname="Tests.Authentication.Login" time="0.781" />
|
||||
<testcase name="testCase6" classname="Tests.Authentication.Login" time="1.331" />
|
||||
</testsuite>
|
||||
<testcase name="testCase7" classname="Tests.Authentication" time="2.508" />
|
||||
<testcase name="testCase8" classname="Tests.Authentication" time="1.230816" />
|
||||
<testcase name="testCase9" classname="Tests.Authentication" time="0.982">
|
||||
<failure message="Assertion error message" type="AssertionError">
|
||||
<!-- Call stack printed here -->
|
||||
</failure>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
@@ -1,212 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<testsuites>
|
||||
<testsuite name="/workspace/phpcheckstyle/phpunit.xml" tests="30" assertions="117" errors="0" failures="2" skipped="0" time="0.041478">
|
||||
<testsuite name="PHPUnitTestSuite" tests="30" assertions="117" errors="0" failures="2" skipped="0" time="0.041478">
|
||||
<testsuite name="CommentsTest" file="/workspace/phpcheckstyle/test/CommentsTest.php" tests="3" assertions="12" errors="0" failures="0" skipped="0" time="0.006702">
|
||||
<testcase name="testGoodDoc" file="/workspace/phpcheckstyle/test/CommentsTest.php" line="12" class="CommentsTest" classname="CommentsTest" assertions="4" time="0.005093"/>
|
||||
<testcase name="testComments" file="/workspace/phpcheckstyle/test/CommentsTest.php" line="30" class="CommentsTest" classname="CommentsTest" assertions="4" time="0.000921">
|
||||
<system-out>File "./test/sample/bad_comments.php" warning, line 4 - Avoid Shell/Perl like comments.
|
||||
File "./test/sample/bad_comments.php" warning, line 6 - The class Comments must have a docblock comment.
|
||||
File "./test/sample/bad_comments.php" warning, line 10 - The function testComment must have a docblock comment.
|
||||
File "./test/sample/bad_comments.php" warning, line 18 - The function testComment returns a value and must include @returns in its docblock.
|
||||
File "./test/sample/bad_comments.php" warning, line 18 - The function testComment parameters must match those in its docblock @param.
|
||||
File "./test/sample/bad_comments.php" warning, line 18 - The function testComment throws an exception and must include @throws in its docblock.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testTODOs" file="/workspace/phpcheckstyle/test/CommentsTest.php" line="48" class="CommentsTest" classname="CommentsTest" assertions="4" time="0.000688">
|
||||
<system-out>File "./test/sample/todo.php" warning, line 3 - TODO: The todo message.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="DeprecationTest" file="/workspace/phpcheckstyle/test/DeprecationTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.000974">
|
||||
<testcase name="testDeprecations" file="/workspace/phpcheckstyle/test/DeprecationTest.php" line="12" class="DeprecationTest" classname="DeprecationTest" assertions="4" time="0.000974">
|
||||
<system-out>File "./test/sample/bad_deprecation.php" warning, line 17 - split is deprecated since PHP 5.3. explode($pattern, $string) or preg_split('@'.$pattern.'@', $string) must be used instead.
|
||||
File "./test/sample/bad_deprecation.php" warning, line 19 - ereg is deprecated since PHP 5.3. preg_match('@'.$pattern.'@', $string) must be used instead.
|
||||
File "./test/sample/bad_deprecation.php" warning, line 21 - session_register is deprecated since PHP 5.3. $_SESSION must be used instead.
|
||||
File "./test/sample/bad_deprecation.php" warning, line 23 - mysql_db_query is deprecated since PHP 5.3. mysql_select_db and mysql_query must be used instead.
|
||||
File "./test/sample/bad_deprecation.php" warning, line 25 - $HTTP_GET_VARS is deprecated since PHP 5.3. $_GET must be used instead.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="GoodTest" file="/workspace/phpcheckstyle/test/GoodTest.php" tests="4" assertions="16" errors="0" failures="0" skipped="0" time="0.005357">
|
||||
<testcase name="testGood" file="/workspace/phpcheckstyle/test/GoodTest.php" line="12" class="GoodTest" classname="GoodTest" assertions="4" time="0.002647"/>
|
||||
<testcase name="testDoWhile" file="/workspace/phpcheckstyle/test/GoodTest.php" line="32" class="GoodTest" classname="GoodTest" assertions="4" time="0.001022"/>
|
||||
<testcase name="testAnonymousFunction" file="/workspace/phpcheckstyle/test/GoodTest.php" line="50" class="GoodTest" classname="GoodTest" assertions="4" time="0.000800"/>
|
||||
<testcase name="testException" file="/workspace/phpcheckstyle/test/GoodTest.php" line="68" class="GoodTest" classname="GoodTest" assertions="4" time="0.000888"/>
|
||||
</testsuite>
|
||||
<testsuite name="IndentationTest" file="/workspace/phpcheckstyle/test/IndentationTest.php" tests="8" assertions="32" errors="0" failures="0" skipped="0" time="0.007654">
|
||||
<testcase name="testTabIndentation" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="12" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000857">
|
||||
<system-out>File "./test/sample/bad_indentation.php" warning, line 8 - Whitespace indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 15 - Whitespace indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 17 - Whitespace indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 18 - Whitespace indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 19 - Whitespace indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 20 - Whitespace indentation must not be used.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testSpaceIndentation" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="30" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000929">
|
||||
<system-out>File "./test/sample/bad_indentation.php" warning, line 10 - Tab indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 10 - The indentation level must be 4 but was 1.
|
||||
File "./test/sample/bad_indentation.php" warning, line 13 - Tab indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 13 - The indentation level must be 4 but was 1.
|
||||
File "./test/sample/bad_indentation.php" warning, line 15 - The indentation level must be 8 but was 4.
|
||||
File "./test/sample/bad_indentation.php" warning, line 16 - Tab indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 16 - The indentation level must be 8 but was 1.
|
||||
File "./test/sample/bad_indentation.php" warning, line 17 - The indentation level must be 8 but was 3.
|
||||
File "./test/sample/bad_indentation.php" warning, line 18 - The indentation level must be 8 but was 5.
|
||||
File "./test/sample/bad_indentation.php" warning, line 19 - The indentation level must be 8 but was 6.
|
||||
File "./test/sample/bad_indentation.php" warning, line 20 - The indentation level must be 4 but was 1.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testSpaceIndentationArray" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="51" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000975">
|
||||
<system-out>File "./test/sample/bad_indentation_array.php" warning, line 10 - Tab indentation must not be used.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 10 - The indentation level must be 4 but was 1.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 13 - Tab indentation must not be used.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 13 - The indentation level must be 4 but was 1.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 16 - The indentation level must be 12 but was 8.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 24 - The indentation level must be 12 but was 8.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 29 - The indentation level must be 8 but was 12.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 15 - Undeclared or unused variable: $aVar.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 19 - Undeclared or unused variable: $bVar.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 23 - Undeclared or unused variable: $cVar.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 27 - Undeclared or unused variable: $dVar.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testGoodSpaceIndentationArray" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="72" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.001212"/>
|
||||
<testcase name="testGoodIndentationNewLine" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="93" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000859"/>
|
||||
<testcase name="testGoodIndentationSpaces" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="116" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000780"/>
|
||||
<testcase name="testBadSpaces" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="137" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.001120">
|
||||
<system-out>File "./test/sample/bad_spaces.php" warning, line 17 - Whitespace must follow ,.
|
||||
File "./test/sample/bad_spaces.php" warning, line 17 - Whitespace must precede {.
|
||||
File "./test/sample/bad_spaces.php" warning, line 19 - Whitespace must follow if.
|
||||
File "./test/sample/bad_spaces.php" warning, line 23 - Whitespace must precede =.
|
||||
File "./test/sample/bad_spaces.php" warning, line 23 - Whitespace must follow =.
|
||||
File "./test/sample/bad_spaces.php" warning, line 23 - Whitespace must precede +.
|
||||
File "./test/sample/bad_spaces.php" warning, line 23 - Whitespace must follow +.
|
||||
File "./test/sample/bad_spaces.php" info, line 25 - Whitespace must not precede ,.
|
||||
File "./test/sample/bad_spaces.php" info, line 26 - Whitespace must not follow !.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testBadSpaceAfterControl" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="155" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000922">
|
||||
<system-out>File "./test/sample/bad_space_after_control.php" warning, line 19 - Whitespace must not follow if.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="MetricsTest" file="/workspace/phpcheckstyle/test/MetricsTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.004147">
|
||||
<testcase name="testMetrics" file="/workspace/phpcheckstyle/test/MetricsTest.php" line="12" class="MetricsTest" classname="MetricsTest" assertions="4" time="0.004147">
|
||||
<system-out>File "./test/sample/bad_metrics.php" warning, line 21 - The function testMetrics's number of parameters (6) must not exceed 4.
|
||||
File "./test/sample/bad_metrics.php" info, line 55 - Line is too long. [233/160]
|
||||
File "./test/sample/bad_metrics.php" warning, line 21 - The Cyclomatic Complexity of function testMetrics is too high. [15/10]
|
||||
File "./test/sample/bad_metrics.php" warning, line 244 - The testMetrics function body length is too long. [223/200]
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="NamingTest" file="/workspace/phpcheckstyle/test/NamingTest.php" tests="2" assertions="8" errors="0" failures="0" skipped="0" time="0.002697">
|
||||
<testcase name="testNaming" file="/workspace/phpcheckstyle/test/NamingTest.php" line="12" class="NamingTest" classname="NamingTest" assertions="4" time="0.001426">
|
||||
<system-out>File "./test/sample/_bad_naming.php" error, line 11 - Constant _badly_named_constant name should follow the pattern /^[A-Z][A-Z0-9_]*$/.
|
||||
File "./test/sample/_bad_naming.php" error, line 13 - Constant bad_CONST name should follow the pattern /^[A-Z][A-Z0-9_]*$/.
|
||||
File "./test/sample/_bad_naming.php" warning, line 17 - Top level variable $XXX name should follow the pattern /^[a-z_][a-zA-Z0-9]*$/.
|
||||
File "./test/sample/_bad_naming.php" warning, line 20 - Variable x name length is too short.
|
||||
File "./test/sample/_bad_naming.php" error, line 28 - Class badlynamedclass name should follow the pattern /^[A-Z][a-zA-Z0-9_]*$/.
|
||||
File "./test/sample/_bad_naming.php" warning, line 32 - Member variable $YYY name should follow the pattern /^[a-z_][a-zA-Z0-9]*$/.
|
||||
File "./test/sample/_bad_naming.php" warning, line 37 - The constructor name must be __construct().
|
||||
File "./test/sample/_bad_naming.php" error, line 44 - Function Badlynamedfunction name should follow the pattern /^[a-z][a-zA-Z0-9]*$/.
|
||||
File "./test/sample/_bad_naming.php" warning, line 47 - Local variable $ZZZ name should follow the pattern /^[a-z_][a-zA-Z0-9]*$/.
|
||||
File "./test/sample/_bad_naming.php" error, line 54 - Protected function Badlynamedfunction2 name should follow the pattern /^[a-z][a-zA-Z0-9]*$/.
|
||||
File "./test/sample/_bad_naming.php" error, line 61 - Private function badlynamedfunction3 name should follow the pattern /^_[a-z][a-zA-Z0-9]*$/.
|
||||
File "./test/sample/_bad_naming.php" error, line 70 - Interface _badlynamedinterface name should follow the pattern /^[A-Z][a-zA-Z0-9_]*$/.
|
||||
File "./test/sample/_bad_naming.php" error, line 75 - File _bad_naming.php name should follow the pattern /^[a-zA-Z][a-zA-Z0-9._]*$/.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testFunctionNaming" file="/workspace/phpcheckstyle/test/NamingTest.php" line="32" class="NamingTest" classname="NamingTest" assertions="4" time="0.001271"/>
|
||||
</testsuite>
|
||||
<testsuite name="OptimizationTest" file="/workspace/phpcheckstyle/test/OptimizationTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.000994">
|
||||
<testcase name="testTextAfterClosingTag" file="/workspace/phpcheckstyle/test/OptimizationTest.php" line="12" class="OptimizationTest" classname="OptimizationTest" assertions="4" time="0.000994">
|
||||
<system-out>File "./test/sample/bad_optimisation.php" warning, line 18 - count function must not be used inside a loop.
|
||||
File "./test/sample/bad_optimisation.php" warning, line 23 - count function must not be used inside a loop.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="OtherTest" file="/workspace/phpcheckstyle/test/OtherTest.php" tests="4" assertions="13" errors="0" failures="2" skipped="0" time="0.007329">
|
||||
<testcase name="testOther" file="/workspace/phpcheckstyle/test/OtherTest.php" line="12" class="OtherTest" classname="OtherTest" assertions="4" time="0.005251">
|
||||
<failure type="PHPUnit\Framework\ExpectationFailedException">OtherTest::testOther
|
||||
We expect 20 warnings
|
||||
Failed asserting that 19 matches expected 20.
|
||||
|
||||
/workspace/phpcheckstyle/test/OtherTest.php:24</failure>
|
||||
<system-out>File "./test/sample/bad_other.php" warning, line 17 - All arguments with default values must be at the end of the block or statement.
|
||||
File "./test/sample/bad_other.php" warning, line 21 - Errors must not be silenced when calling a function.
|
||||
File "./test/sample/bad_other.php" warning, line 23 - Prefer single-quoted strings when you don't need string interpolation.
|
||||
File "./test/sample/bad_other.php" warning, line 23 - Encapsed variables must not be used inside a string.
|
||||
File "./test/sample/bad_other.php" warning, line 23 - Encapsed variables must not be used inside a string.
|
||||
File "./test/sample/bad_other.php" warning, line 23 - Prefer single-quoted strings when you don't need string interpolation.
|
||||
File "./test/sample/bad_other.php" warning, line 37 - TODO: Show todos
|
||||
File "./test/sample/bad_other.php" warning, line 40 - Avoid empty statements (;;).
|
||||
File "./test/sample/bad_other.php" warning, line 42 - Boolean operators (&&) must be used instead of logical operators (AND).
|
||||
File "./test/sample/bad_other.php" warning, line 42 - Empty if block.
|
||||
File "./test/sample/bad_other.php" warning, line 48 - Heredoc syntax must not be used.
|
||||
File "./test/sample/bad_other.php" warning, line 52 - The statement if must contain its code within a {} block.
|
||||
File "./test/sample/bad_other.php" warning, line 54 - Consider using a strict comparison operator instead of ==.
|
||||
File "./test/sample/bad_other.php" warning, line 54 - The statement while must contain its code within a {} block.
|
||||
File "./test/sample/bad_other.php" warning, line 66 - The switch statement must have a default case.
|
||||
File "./test/sample/bad_other.php" warning, line 79 - The default case of a switch statement must be located after all other cases.
|
||||
File "./test/sample/bad_other.php" warning, line 93 - Unary operators (++ or --) must not be used inside a control statement
|
||||
File "./test/sample/bad_other.php" warning, line 95 - Assigments (=) must not be used inside a control statement.
|
||||
File "./test/sample/bad_other.php" warning, line 106 - File ./test/sample/bad_other.php must not have multiple class declarations.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testException" file="/workspace/phpcheckstyle/test/OtherTest.php" line="31" class="OtherTest" classname="OtherTest" assertions="1" time="0.000751">
|
||||
<failure type="PHPUnit\Framework\ExpectationFailedException">OtherTest::testException
|
||||
We expect 1 error
|
||||
Failed asserting that 0 matches expected 1.
|
||||
|
||||
/workspace/phpcheckstyle/test/OtherTest.php:40</failure>
|
||||
</testcase>
|
||||
<testcase name="testEmpty" file="/workspace/phpcheckstyle/test/OtherTest.php" line="50" class="OtherTest" classname="OtherTest" assertions="4" time="0.000427">
|
||||
<system-out>File "./test/sample/empty.php" warning, line 1 - The file ./test/sample/empty.php is empty.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testSwitchCaseNeedBreak" file="/workspace/phpcheckstyle/test/OtherTest.php" line="69" class="OtherTest" classname="OtherTest" assertions="4" time="0.000901">
|
||||
<system-out>File "./test/sample/switch_multi_case.php" warning, line 10 - The case statement must contain a break.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="PHPTagsTest" file="/workspace/phpcheckstyle/test/PHPTagsTest.php" tests="2" assertions="8" errors="0" failures="0" skipped="0" time="0.001272">
|
||||
<testcase name="testTextAfterClosingTag" file="/workspace/phpcheckstyle/test/PHPTagsTest.php" line="12" class="PHPTagsTest" classname="PHPTagsTest" assertions="4" time="0.000641">
|
||||
<system-out>File "./test/sample/bad_php_tags_text_after_end.php" warning, line 9 - A PHP close tag must not be included at the end of the file.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testClosingTagNotNeeded" file="/workspace/phpcheckstyle/test/PHPTagsTest.php" line="30" class="PHPTagsTest" classname="PHPTagsTest" assertions="4" time="0.000631">
|
||||
<system-out>File "./test/sample/bad_php_tags_end_not_needed.php" warning, line 1 - PHP tag should be at the beginning of the line.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="ProhibitedTest" file="/workspace/phpcheckstyle/test/ProhibitedTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.000938">
|
||||
<testcase name="testProhibited" file="/workspace/phpcheckstyle/test/ProhibitedTest.php" line="13" class="ProhibitedTest" classname="ProhibitedTest" assertions="4" time="0.000938">
|
||||
<system-out>File "./test/sample/bad_prohibited.php" warning, line 18 - The function exec must not be called.
|
||||
File "./test/sample/bad_prohibited.php" warning, line 20 - Token T_PRINT must not be used.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="StrictCompareTest" file="/workspace/phpcheckstyle/test/StrictCompareTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.001578">
|
||||
<testcase name="testStrictCompare" file="/workspace/phpcheckstyle/test/StrictCompareTest.php" line="12" class="StrictCompareTest" classname="StrictCompareTest" assertions="4" time="0.001578">
|
||||
<system-out>File "./test/sample/bad_strictcompare.php" warning, line 14 - Consider using a strict comparison operator instead of ==.
|
||||
File "./test/sample/bad_strictcompare.php" warning, line 19 - Consider using a strict comparison operator instead of !=.
|
||||
File "./test/sample/bad_strictcompare.php" warning, line 24 - Consider using a strict comparison operator instead of ==.
|
||||
File "./test/sample/bad_strictcompare.php" warning, line 29 - Consider using a strict comparison operator instead of ==.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="UnusedTest" file="/workspace/phpcheckstyle/test/UnusedTest.php" tests="2" assertions="8" errors="0" failures="0" skipped="0" time="0.001835">
|
||||
<testcase name="testGoodUnused" file="/workspace/phpcheckstyle/test/UnusedTest.php" line="13" class="UnusedTest" classname="UnusedTest" assertions="4" time="0.000940"/>
|
||||
<testcase name="testBadUnused" file="/workspace/phpcheckstyle/test/UnusedTest.php" line="32" class="UnusedTest" classname="UnusedTest" assertions="4" time="0.000895">
|
||||
<system-out>File "./test/sample/bad_unused.php" warning, line 23 - Function _testUnused has unused code after RETURN.
|
||||
File "./test/sample/bad_unused.php" warning, line 27 - The function _testUnused parameter $b is not used.
|
||||
File "./test/sample/bad_unused.php" warning, line 18 - Unused private function: _testUnused.
|
||||
File "./test/sample/bad_unused.php" warning, line 20 - Undeclared or unused variable: $c.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
</testsuite>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
5
__tests__/fixtures/integration/empty-tests.xml
Normal file
@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuites name="EmptySuite" tests="0" failures="0" errors="0" time="0">
<testsuite name="EmptySuite" tests="0" failures="0" errors="0" time="0">
</testsuite>
</testsuites>
14
__tests__/fixtures/integration/failing-tests.xml
Normal file
@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuites name="FailingSuite" tests="3" failures="1" errors="0" time="0.5">
<testsuite name="FailingSuite" tests="3" failures="1" errors="0" time="0.5">
<testcase name="should pass test 1" classname="FailingSuite" time="0.1"/>
<testcase name="should fail test 2" classname="FailingSuite" time="0.2">
<failure message="Assertion failed" type="AssertionError">
Expected: true
Received: false
at Object.test (/test/example.test.js:10:5)
</failure>
</testcase>
<testcase name="should pass test 3" classname="FailingSuite" time="0.2"/>
</testsuite>
</testsuites>
8
__tests__/fixtures/integration/passing-tests.xml
Normal file
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuites name="PassingSuite" tests="3" failures="0" errors="0" time="0.5">
<testsuite name="PassingSuite" tests="3" failures="0" errors="0" time="0.5">
<testcase name="should pass test 1" classname="PassingSuite" time="0.1"/>
<testcase name="should pass test 2" classname="PassingSuite" time="0.2"/>
<testcase name="should pass test 3" classname="PassingSuite" time="0.2"/>
</testsuite>
</testsuites>
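Together, the empty, failing and passing fixtures above cover the matrix the #217 integration jobs assert on. The sketch below only restates the intended semantics, under the assumption that fail-on-error gates failing tests and fail-on-empty gates reports with no test cases; expectedConclusion is a hypothetical helper for illustration, not part of the action:

```ts
// Illustrative only: maps a fixture's counts plus the two inputs to the
// conclusion and step outcome the integration workflow expects to observe.
type Conclusion = 'success' | 'failure'

export function expectedConclusion(
  total: number,
  failed: number,
  failOnError: boolean,
  failOnEmpty: boolean
): {conclusion: Conclusion; stepShouldFail: boolean} {
  const empty = total === 0
  // The reported conclusion reflects the test results themselves.
  const conclusion: Conclusion = failed > 0 ? 'failure' : 'success'
  // The reporter step itself only fails when the corresponding input opts in:
  // fail-on-error for failing tests, fail-on-empty for empty reports.
  const stepShouldFail = (failed > 0 && failOnError) || (empty && failOnEmpty)
  return {conclusion, stepShouldFail}
}

// e.g. failing-tests.xml with fail-on-error=false: conclusion "failure",
// but the reporter step is still expected to succeed.
```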
@@ -1,9 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<testsuites>
|
||||
<testsuite errors="0" skipped="0" tests="4" time="0.3" timestamp="2026-01-01T16:36:10">
|
||||
<testcase classname="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testRenderingBasics]" name="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testRenderingBasics]"/>
|
||||
<testcase classname="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testRenderingIndividual]" name="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testRenderingIndividual]"/>
|
||||
<testcase classname="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testRenderingComponents]" name="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testRenderingComponents]"/>
|
||||
<testcase classname="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testMultipleFormsInTemplate]" name="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testMultipleFormsInTemplate]"/>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
@@ -1,83 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<testsuites>
|
||||
<testsuite errors="1" skipped="3" tests="65" time="2.1" timestamp="2026-01-01T16:50:52">
|
||||
<testcase classname="tests/Framework/Dumper.toPhp.php7.phpt" name="tests/Framework/Dumper.toPhp.php7.phpt">
|
||||
<skipped/>
|
||||
</testcase>
|
||||
<testcase classname="tests/CodeCoverage/Collector.start.phpt" name="tests/CodeCoverage/Collector.start.phpt">
|
||||
<skipped/>
|
||||
</testcase>
|
||||
<testcase classname="tests/CodeCoverage/PhpParser.parse.lines.phpt" name="tests/CodeCoverage/PhpParser.parse.lines.phpt"/>
|
||||
<testcase classname="tests/CodeCoverage/PhpParser.parse.methods.phpt" name="tests/CodeCoverage/PhpParser.parse.methods.phpt"/>
|
||||
<testcase classname="tests/CodeCoverage/CloverXMLGenerator.phpt" name="tests/CodeCoverage/CloverXMLGenerator.phpt"/>
|
||||
<testcase classname="tests/CodeCoverage/PhpParser.parse.edge.phpt" name="tests/CodeCoverage/PhpParser.parse.edge.phpt"/>
|
||||
<testcase classname="tests/CodeCoverage/PhpParser.parse.lines-of-code.phpt" name="tests/CodeCoverage/PhpParser.parse.lines-of-code.phpt"/>
|
||||
<testcase classname="tests/CodeCoverage/PhpParser.parse.namespaces.phpt" name="tests/CodeCoverage/PhpParser.parse.namespaces.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.contains.phpt" name="tests/Framework/Assert.contains.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.count.phpt" name="tests/Framework/Assert.count.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.equal.phpt" name="tests/Framework/Assert.equal.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.equal.recursive.phpt [method=testSimple]" name="tests/Framework/Assert.equal.recursive.phpt [method=testSimple]"/>
|
||||
<testcase classname="tests/Framework/Assert.equal.recursive.phpt [method=testMultiple]" name="tests/Framework/Assert.equal.recursive.phpt [method=testMultiple]"/>
|
||||
<testcase classname="tests/Framework/Assert.equal.recursive.phpt [method=testDeep]" name="tests/Framework/Assert.equal.recursive.phpt [method=testDeep]"/>
|
||||
<testcase classname="tests/Framework/Assert.equal.recursive.phpt [method=testCross]" name="tests/Framework/Assert.equal.recursive.phpt [method=testCross]"/>
|
||||
<testcase classname="tests/Framework/Assert.equal.recursive.phpt [method=testThirdParty]" name="tests/Framework/Assert.equal.recursive.phpt [method=testThirdParty]"/>
|
||||
<testcase classname="tests/Framework/Assert.error.phpt" name="tests/Framework/Assert.error.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.exception.phpt" name="tests/Framework/Assert.exception.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.false.phpt" name="tests/Framework/Assert.false.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.match.phpt" name="tests/Framework/Assert.match.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.match.regexp.phpt" name="tests/Framework/Assert.match.regexp.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.nan.phpt" name="tests/Framework/Assert.nan.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.noError.phpt" name="tests/Framework/Assert.noError.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.same.phpt" name="tests/Framework/Assert.same.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.null.phpt" name="tests/Framework/Assert.null.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.true.phpt" name="tests/Framework/Assert.true.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.truthy.phpt" name="tests/Framework/Assert.truthy.phpt"/>
|
||||
<testcase classname="tests/Framework/DataProvider.load.phpt" name="tests/Framework/DataProvider.load.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.type.phpt" name="tests/Framework/Assert.type.phpt"/>
|
||||
<testcase classname="tests/Framework/DataProvider.parseAnnotation.phpt" name="tests/Framework/DataProvider.parseAnnotation.phpt"/>
|
||||
<testcase classname="tests/Framework/DataProvider.testQuery.phpt" name="tests/Framework/DataProvider.testQuery.phpt"/>
|
||||
<testcase classname="tests/Framework/DomQuery.css2Xpath.phpt" name="tests/Framework/DomQuery.css2Xpath.phpt"/>
|
||||
<testcase classname="tests/Framework/DomQuery.fromHtml.phpt" name="tests/Framework/DomQuery.fromHtml.phpt"/>
|
||||
<testcase classname="tests/Framework/DomQuery.fromXml.phpt" name="tests/Framework/DomQuery.fromXml.phpt"/>
|
||||
<testcase classname="tests/Framework/Dumper.dumpException.phpt" name="tests/Framework/Dumper.dumpException.phpt"/>
|
||||
<testcase classname="tests/Framework/Dumper.color.phpt" name="tests/Framework/Dumper.color.phpt"/>
|
||||
<testcase classname="tests/Framework/Dumper.toLine.phpt" name="tests/Framework/Dumper.toLine.phpt"/>
|
||||
<testcase classname="tests/Framework/Dumper.toPhp.recursion.phpt" name="tests/Framework/Dumper.toPhp.recursion.phpt"/>
|
||||
<testcase classname="tests/Framework/Dumper.toPhp.phpt" name="tests/Framework/Dumper.toPhp.phpt"/>
|
||||
<testcase classname="tests/Framework/FileMock.phpt" name="tests/Framework/FileMock.phpt"/>
|
||||
<testcase classname="tests/Framework/Helpers.escapeArg.phpt" name="tests/Framework/Helpers.escapeArg.phpt"/>
|
||||
<testcase classname="tests/Framework/Helpers.parseDocComment.phpt" name="tests/Framework/Helpers.parseDocComment.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.annotationThrows.phpt" name="tests/Framework/TestCase.annotationThrows.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.annotationThrows.setUp.tearDown.phpt" name="tests/Framework/TestCase.annotationThrows.setUp.tearDown.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.annotationThrows.syntax.phpt" name="tests/Framework/TestCase.annotationThrows.syntax.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.basic.phpt" name="tests/Framework/TestCase.basic.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.dataProvider.generator.phpt" name="tests/Framework/TestCase.dataProvider.generator.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.dataProvider.phpt" name="tests/Framework/TestCase.dataProvider.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.invalidMethods.phpt" name="tests/Framework/TestCase.invalidMethods.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.invalidProvider.phpt" name="tests/Framework/TestCase.invalidProvider.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.order.error.phpt" name="tests/Framework/TestCase.order.error.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.order.errorMuted.phpt" name="tests/Framework/TestCase.order.errorMuted.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.order.phpt" name="tests/Framework/TestCase.order.phpt"/>
|
||||
<testcase classname="Prevent loop in error handling. The #268 regression. | tests/Framework/TestCase.ownErrorHandler.phpt" name="Prevent loop in error handling. The #268 regression. | tests/Framework/TestCase.ownErrorHandler.phpt"/>
|
||||
<testcase classname="tests/Runner/CommandLine.phpt" name="tests/Runner/CommandLine.phpt"/>
|
||||
<testcase classname="tests/Runner/HhvmPhpInterpreter.phpt" name="tests/Runner/HhvmPhpInterpreter.phpt">
|
||||
<skipped/>
|
||||
</testcase>
|
||||
<testcase classname="tests/Runner/Runner.find-tests.phpt" name="tests/Runner/Runner.find-tests.phpt"/>
|
||||
<testcase classname="tests/Runner/Job.phpt" name="tests/Runner/Job.phpt"/>
|
||||
<testcase classname="tests/Runner/ZendPhpExecutable.phpt" name="tests/Runner/ZendPhpExecutable.phpt"/>
|
||||
<testcase classname="tests/Runner/Runner.multiple.phpt" name="tests/Runner/Runner.multiple.phpt"/>
|
||||
<testcase classname="tests/Runner/Runner.edge.phpt" name="tests/Runner/Runner.edge.phpt"/>
|
||||
<testcase classname="tests/Runner/Runner.stop-on-fail.phpt" name="tests/Runner/Runner.stop-on-fail.phpt"/>
|
||||
<testcase classname="tests/Runner/Runner.multiple-fails.phpt" name="tests/Runner/Runner.multiple-fails.phpt">
|
||||
<failure message="Failed: '... in /Users/izso/Developer/nette/tester/tests/Runner/multiple-fails/...' should match
|
||||
... '..., unexpected end of file in %a%testcase-syntax-error.phptx on line ...'
|
||||
|
||||
diff '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.expected' '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.actual'
|
||||
|
||||
in tests/Runner/Runner.multiple-fails.phpt(78) Tester\Assert::match()"/>
|
||||
</testcase>
|
||||
<testcase classname="tests/RunnerOutput/JUnitPrinter.phpt" name="tests/RunnerOutput/JUnitPrinter.phpt"/>
|
||||
<testcase classname="tests/Runner/Runner.annotations.phpt" name="tests/Runner/Runner.annotations.phpt"/>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
@@ -1,23 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<testsuites>
|
||||
<testsuite name="SampleSuite" tests="6" failures="6" time="0.006">
|
||||
<testcase name="testFailure" classname="SampleSuite" file="/home/runner/work/repo/src/Fake.php" line="42" time="0.001">
|
||||
<failure type="Exception" message="Boom">/home/runner/work/repo/src/Fake.php:42</failure>
|
||||
</testcase>
|
||||
<testcase name="testStringFailure" classname="SampleSuite" file="/home/runner/work/repo/src/Other.php" line="10" time="0.001">
|
||||
<failure>/home/runner/work/repo/src/Other.php:10</failure>
|
||||
</testcase>
|
||||
<testcase name="testParenFailure" classname="SampleSuite" file="/home/runner/work/repo/src/Paren.php" line="123" time="0.001">
|
||||
<failure>at /home/runner/work/repo/src/Paren.php(123)</failure>
|
||||
</testcase>
|
||||
<testcase name="testWindowsFailure" classname="SampleSuite" file="C:\repo\src\Win.php" line="77" time="0.001">
|
||||
<failure>C:\repo\src\Win.php:77</failure>
|
||||
</testcase>
|
||||
<testcase name="testWindowsParenFailure" classname="SampleSuite" file="C:\repo\src\WinParen.php" line="88" time="0.001">
|
||||
<failure>at C:\repo\src\WinParen.php(88)</failure>
|
||||
</testcase>
|
||||
<testcase name="testPhptFailure" classname="SampleSuite" file="/home/runner/work/repo/tests/Sample.phpt" line="12" time="0.001">
|
||||
<failure>/home/runner/work/repo/tests/Sample.phpt:12</failure>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
@@ -1,79 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<testsuites>
|
||||
<testsuite name="CLI Arguments" tests="12" assertions="12" errors="0" failures="2" skipped="0" time="0.140397">
|
||||
<testcase name="targeting-traits-with-coversclass-attribute-is-deprecated.phpt" file="/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-coversclass-attribute-is-deprecated.phpt" assertions="1" time="0.068151">
|
||||
<failure type="PHPUnit\Framework\PhptAssertionFailedError">targeting-traits-with-coversclass-attribute-is-deprecated.phptFailed asserting that string matches format description.
|
||||
--- Expected
|
||||
+++ Actual
|
||||
@@ @@
|
||||
PHPUnit Started (PHPUnit 11.2-g0c2333363 using PHP 8.2.17 (cli) on Linux)
|
||||
Test Runner Configured
|
||||
Test Suite Loaded (1 test)
|
||||
+Test Runner Triggered Warning (No code coverage driver available)
|
||||
Event Facade Sealed
|
||||
Test Runner Started
|
||||
Test Suite Sorted
|
||||
@@ @@
|
||||
Test Preparation Started (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest::testSomething)
|
||||
Test Prepared (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest::testSomething)
|
||||
Test Passed (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest::testSomething)
|
||||
-Test Runner Triggered Deprecation (Targeting a trait such as PHPUnit\TestFixture\CoveredTrait with #[CoversClass] is deprecated, please refactor your test to use #[CoversTrait] instead.)
|
||||
Test Finished (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest::testSomething)
|
||||
Test Suite Finished (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest, 1 test)
|
||||
Test Runner Execution Finished
|
||||
Test Runner Finished
|
||||
-PHPUnit Finished (Shell Exit Code: 0)
|
||||
+PHPUnit Finished (Shell Exit Code: 1)
|
||||
|
||||
/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-coversclass-attribute-is-deprecated.phpt:28
|
||||
/home/matteo/OSS/phpunit/src/Framework/TestSuite.php:369
|
||||
/home/matteo/OSS/phpunit/src/TextUI/TestRunner.php:62
|
||||
/home/matteo/OSS/phpunit/src/TextUI/Application.php:200</failure>
|
||||
</testcase>
|
||||
<testcase name="targeting-traits-with-usesclass-attribute-is-deprecated.phpt" file="/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-usesclass-attribute-is-deprecated.phpt" assertions="1" time="0.064268">
|
||||
<failure type="PHPUnit\Framework\PhptAssertionFailedError">targeting-traits-with-usesclass-attribute-is-deprecated.phptFailed asserting that string matches format description.
|
||||
--- Expected
|
||||
+++ Actual
|
||||
@@ @@
|
||||
PHPUnit Started (PHPUnit 11.2-g0c2333363 using PHP 8.2.17 (cli) on Linux)
|
||||
Test Runner Configured
|
||||
Test Suite Loaded (1 test)
|
||||
+Test Runner Triggered Warning (No code coverage driver available)
|
||||
Event Facade Sealed
|
||||
Test Runner Started
|
||||
Test Suite Sorted
|
||||
@@ @@
|
||||
Test Preparation Started (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest::testSomething)
|
||||
Test Prepared (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest::testSomething)
|
||||
Test Passed (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest::testSomething)
|
||||
-Test Runner Triggered Deprecation (Targeting a trait such as PHPUnit\TestFixture\CoveredTrait with #[UsesClass] is deprecated, please refactor your test to use #[UsesTrait] instead.)
|
||||
Test Finished (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest::testSomething)
|
||||
Test Suite Finished (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest, 1 test)
|
||||
Test Runner Execution Finished
|
||||
Test Runner Finished
|
||||
-PHPUnit Finished (Shell Exit Code: 0)
|
||||
+PHPUnit Finished (Shell Exit Code: 1)
|
||||
|
||||
/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-usesclass-attribute-is-deprecated.phpt:28
|
||||
/home/matteo/OSS/phpunit/src/Framework/TestSuite.php:369
|
||||
/home/matteo/OSS/phpunit/src/TextUI/TestRunner.php:62
|
||||
/home/matteo/OSS/phpunit/src/TextUI/Application.php:200</failure>
|
||||
</testcase>
|
||||
<testsuite name="PHPUnit\Event\CollectingDispatcherTest" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/CollectingDispatcherTest.php" tests="2" assertions="2" errors="0" failures="0" skipped="0" time="0.004256">
|
||||
<testcase name="testHasNoCollectedEventsWhenFlushedImmediatelyAfterCreation" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/CollectingDispatcherTest.php" line="20" class="PHPUnit\Event\CollectingDispatcherTest" classname="PHPUnit.Event.CollectingDispatcherTest" assertions="1" time="0.001441"/>
|
||||
<testcase name="testCollectsDispatchedEventsUntilFlushed" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/CollectingDispatcherTest.php" line="27" class="PHPUnit\Event\CollectingDispatcherTest" classname="PHPUnit.Event.CollectingDispatcherTest" assertions="1" time="0.002815"/>
|
||||
</testsuite>
|
||||
<testsuite name="PHPUnit\Event\DeferringDispatcherTest" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" tests="4" assertions="4" errors="0" failures="0" skipped="0" time="0.002928">
|
||||
<testcase name="testCollectsEventsUntilFlush" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" line="22" class="PHPUnit\Event\DeferringDispatcherTest" classname="PHPUnit.Event.DeferringDispatcherTest" assertions="1" time="0.001672"/>
|
||||
<testcase name="testFlushesCollectedEvents" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" line="35" class="PHPUnit\Event\DeferringDispatcherTest" classname="PHPUnit.Event.DeferringDispatcherTest" assertions="1" time="0.000661"/>
|
||||
<testcase name="testSubscriberCanBeRegistered" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" line="53" class="PHPUnit\Event\DeferringDispatcherTest" classname="PHPUnit.Event.DeferringDispatcherTest" assertions="1" time="0.000334"/>
|
||||
<testcase name="testTracerCanBeRegistered" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" line="69" class="PHPUnit\Event\DeferringDispatcherTest" classname="PHPUnit.Event.DeferringDispatcherTest" assertions="1" time="0.000262"/>
|
||||
</testsuite>
|
||||
<testsuite name="PHPUnit\Event\DirectDispatcherTest" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" tests="4" assertions="4" errors="0" failures="0" skipped="0" time="0.000794">
|
||||
<testcase name="testDispatchesEventToKnownSubscribers" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" line="24" class="PHPUnit\Event\DirectDispatcherTest" classname="PHPUnit.Event.DirectDispatcherTest" assertions="1" time="0.000170"/>
|
||||
<testcase name="testDispatchesEventToTracers" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" line="43" class="PHPUnit\Event\DirectDispatcherTest" classname="PHPUnit.Event.DirectDispatcherTest" assertions="1" time="0.000248"/>
|
||||
<testcase name="testRegisterRejectsUnknownSubscriber" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" line="62" class="PHPUnit\Event\DirectDispatcherTest" classname="PHPUnit.Event.DirectDispatcherTest" assertions="1" time="0.000257"/>
|
||||
<testcase name="testDispatchRejectsUnknownEventType" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" line="73" class="PHPUnit\Event\DirectDispatcherTest" classname="PHPUnit.Event.DirectDispatcherTest" assertions="1" time="0.000119"/>
|
||||
</testsuite>
|
||||
</testsuite>
|
||||
</testsuites>
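The deleted fixture above nests the leaf suites (CLI Arguments and the three dispatcher suites) inside a wrapping <testsuite>, which is exactly the shape the PhpunitJunitParser shown later in this diff flattens: only suites with direct <testcase> children are kept. A minimal sketch of that behaviour, assuming the module paths used by the test files in this diff; the inline XML is illustrative, not the fixture itself:

import {PhpunitJunitParser} from '../src/parsers/phpunit-junit/phpunit-junit-parser'
import {ParseOptions} from '../src/test-parser'

// Wrapper suite with no direct test cases, one leaf suite with two tests
const xml = `<?xml version="1.0" encoding="UTF-8"?>
<testsuites>
  <testsuite name="All" tests="2" failures="1" time="0.1">
    <testsuite name="Unit\\FooTest" tests="2" failures="1" time="0.1">
      <testcase name="testOk" classname="Unit\\FooTest" time="0.01"/>
      <testcase name="testBroken" classname="Unit\\FooTest" time="0.02">
        <failure type="AssertionError">Failed asserting that false is true.</failure>
      </testcase>
    </testsuite>
  </testsuite>
</testsuites>`

const opts: ParseOptions = {parseErrors: true, trackedFiles: []}

async function demo(): Promise<void> {
  const result = await new PhpunitJunitParser(opts).parse('phpunit.xml', xml)
  // Only the leaf suite with direct test cases is kept; the 'All' wrapper is dropped
  console.log(result.suites.map(s => s.name))
  console.log(`${result.passed} passed, ${result.failed} failed`) // 1 passed, 1 failed
}

demo()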
@@ -73,46 +73,6 @@ describe('java-junit tests', () => {
    fs.writeFileSync(outputPath, report)
  })

  it('report from testmo/junitxml basic example matches snapshot', async () => {
    const fixturePath = path.join(__dirname, 'fixtures', 'external', 'java', 'junit4-basic.xml')
    const outputPath = path.join(__dirname, '__outputs__', 'junit-basic.md')
    const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
    const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})

    const opts: ParseOptions = {
      parseErrors: true,
      trackedFiles: []
    }

    const parser = new JavaJunitParser(opts)
    const result = await parser.parse(filePath, fileContent)
    expect(result).toMatchSnapshot()

    const report = getReport([result])
    fs.mkdirSync(path.dirname(outputPath), {recursive: true})
    fs.writeFileSync(outputPath, report)
  })

  it('report from testmo/junitxml complete example matches snapshot', async () => {
    const fixturePath = path.join(__dirname, 'fixtures', 'external', 'java', 'junit4-complete.xml')
    const outputPath = path.join(__dirname, '__outputs__', 'junit-complete.md')
    const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
    const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})

    const opts: ParseOptions = {
      parseErrors: true,
      trackedFiles: []
    }

    const parser = new JavaJunitParser(opts)
    const result = await parser.parse(filePath, fileContent)
    expect(result).toMatchSnapshot()

    const report = getReport([result])
    fs.mkdirSync(path.dirname(outputPath), {recursive: true})
    fs.writeFileSync(outputPath, report)
  })

  it('parses empty failures in test results', async () => {
    const fixturePath = path.join(__dirname, 'fixtures', 'external', 'java', 'empty_failures.xml')
    const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
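The last test above reads fixtures/external/java/empty_failures.xml; its assertions fall outside this hunk. The scenario it guards, a <failure/> element with no message attribute and no body, can be sketched independently. The inline XML below is an assumed shape rather than the real fixture, and the expected outcome (the case still counts as failed) is an assumption based on how the other JUnit-style parsers in this diff treat any <failure> element:

import {JavaJunitParser} from '../src/parsers/java-junit/java-junit-parser'
import {ParseOptions} from '../src/test-parser'

// Assumed 'empty failure': self-closing <failure/> with no message and no text
const xml = `<?xml version="1.0" encoding="UTF-8"?>
<testsuite name="EmptyFailures" tests="1" failures="1" time="0.01">
  <testcase name="testEmptyFailure" classname="EmptyFailures" time="0.01">
    <failure/>
  </testcase>
</testsuite>`

const opts: ParseOptions = {parseErrors: true, trackedFiles: []}

async function demo(): Promise<void> {
  const result = await new JavaJunitParser(opts).parse('empty_failures.xml', xml)
  // The test case is still reported as failed even though the failure carries no details
  console.log(result.failed) // 1
}

demo()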
@@ -1,347 +0,0 @@
|
||||
import * as fs from 'fs'
|
||||
import * as path from 'path'
|
||||
|
||||
import {PhpunitJunitParser} from '../src/parsers/phpunit-junit/phpunit-junit-parser'
|
||||
import {ParseOptions} from '../src/test-parser'
|
||||
import {getReport} from '../src/report/get-report'
|
||||
import {normalizeFilePath} from '../src/utils/path-utils'
|
||||
|
||||
describe('phpunit-junit tests', () => {
|
||||
it('produces empty test run result when there are no test cases', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'empty', 'phpunit-empty.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result.tests).toBe(0)
|
||||
expect(result.result).toBe('success')
|
||||
})
|
||||
|
||||
it('report from phpunit test results matches snapshot', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'phpunit', 'phpunit.xml')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'phpunit-test-results.md')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('parses nested test suites correctly', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'phpunit', 'phpunit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Should have 4 test suites (3 nested ones plus the parent with direct testcases)
|
||||
expect(result.suites.length).toBe(4)
|
||||
|
||||
// Verify suite names
|
||||
const suiteNames = result.suites.map(s => s.name)
|
||||
expect(suiteNames).toContain('PHPUnit\\Event\\CollectingDispatcherTest')
|
||||
expect(suiteNames).toContain('PHPUnit\\Event\\DeferringDispatcherTest')
|
||||
expect(suiteNames).toContain('PHPUnit\\Event\\DirectDispatcherTest')
|
||||
expect(suiteNames).toContain('CLI Arguments')
|
||||
|
||||
// Verify total test count
|
||||
expect(result.tests).toBe(12)
|
||||
expect(result.passed).toBe(10)
|
||||
expect(result.failed).toBe(2)
|
||||
})
|
||||
|
||||
it('extracts error details from failures', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'phpunit', 'phpunit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Find the CLI Arguments suite which has failures
|
||||
const cliSuite = result.suites.find(s => s.name === 'CLI Arguments')
|
||||
expect(cliSuite).toBeDefined()
|
||||
|
||||
// Get the failed tests
|
||||
const failedTests = cliSuite!.groups.flatMap(g => g.tests).filter(t => t.result === 'failed')
|
||||
expect(failedTests.length).toBe(2)
|
||||
|
||||
// Verify error details are captured
|
||||
for (const test of failedTests) {
|
||||
expect(test.error).toBeDefined()
|
||||
expect(test.error!.details).toContain('Failed asserting that string matches format description')
|
||||
}
|
||||
})
|
||||
|
||||
it('maps absolute paths to tracked files for annotations', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'phpunit', 'phpunit-paths.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: [
|
||||
'src/Fake.php',
|
||||
'src/Other.php',
|
||||
'src/Paren.php',
|
||||
'src/Win.php',
|
||||
'src/WinParen.php',
|
||||
'tests/Sample.phpt'
|
||||
]
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
const suite = result.suites.find(s => s.name === 'SampleSuite')
|
||||
expect(suite).toBeDefined()
|
||||
|
||||
const tests = suite!.groups.flatMap(g => g.tests)
|
||||
const fileFailure = tests.find(t => t.name === 'testFailure')
|
||||
expect(fileFailure).toBeDefined()
|
||||
expect(fileFailure!.error).toBeDefined()
|
||||
expect(fileFailure!.error!.path).toBe('src/Fake.php')
|
||||
expect(fileFailure!.error!.line).toBe(42)
|
||||
|
||||
const stringFailure = tests.find(t => t.name === 'testStringFailure')
|
||||
expect(stringFailure).toBeDefined()
|
||||
expect(stringFailure!.error).toBeDefined()
|
||||
expect(stringFailure!.error!.path).toBe('src/Other.php')
|
||||
expect(stringFailure!.error!.line).toBe(10)
|
||||
|
||||
const parenFailure = tests.find(t => t.name === 'testParenFailure')
|
||||
expect(parenFailure).toBeDefined()
|
||||
expect(parenFailure!.error).toBeDefined()
|
||||
expect(parenFailure!.error!.path).toBe('src/Paren.php')
|
||||
expect(parenFailure!.error!.line).toBe(123)
|
||||
|
||||
const windowsFailure = tests.find(t => t.name === 'testWindowsFailure')
|
||||
expect(windowsFailure).toBeDefined()
|
||||
expect(windowsFailure!.error).toBeDefined()
|
||||
expect(windowsFailure!.error!.path).toBe('src/Win.php')
|
||||
expect(windowsFailure!.error!.line).toBe(77)
|
||||
|
||||
const windowsParenFailure = tests.find(t => t.name === 'testWindowsParenFailure')
|
||||
expect(windowsParenFailure).toBeDefined()
|
||||
expect(windowsParenFailure!.error).toBeDefined()
|
||||
expect(windowsParenFailure!.error!.path).toBe('src/WinParen.php')
|
||||
expect(windowsParenFailure!.error!.line).toBe(88)
|
||||
|
||||
const phptFailure = tests.find(t => t.name === 'testPhptFailure')
|
||||
expect(phptFailure).toBeDefined()
|
||||
expect(phptFailure!.error).toBeDefined()
|
||||
expect(phptFailure!.error!.path).toBe('tests/Sample.phpt')
|
||||
expect(phptFailure!.error!.line).toBe(12)
|
||||
})
|
||||
|
||||
it('parses junit-basic.xml with nested suites and failure', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'junit-basic.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Verify test counts
|
||||
expect(result.tests).toBe(9)
|
||||
expect(result.passed).toBe(8)
|
||||
expect(result.failed).toBe(1)
|
||||
expect(result.result).toBe('failed')
|
||||
|
||||
// Verify suites - should have Tests.Registration, Tests.Authentication.Login, and Tests.Authentication
|
||||
expect(result.suites.length).toBe(3)
|
||||
|
||||
const suiteNames = result.suites.map(s => s.name)
|
||||
expect(suiteNames).toContain('Tests.Registration')
|
||||
expect(suiteNames).toContain('Tests.Authentication.Login')
|
||||
expect(suiteNames).toContain('Tests.Authentication')
|
||||
|
||||
// Verify the Registration suite has 3 tests
|
||||
const registrationSuite = result.suites.find(s => s.name === 'Tests.Registration')
|
||||
expect(registrationSuite).toBeDefined()
|
||||
const registrationTests = registrationSuite!.groups.flatMap(g => g.tests)
|
||||
expect(registrationTests.length).toBe(3)
|
||||
|
||||
// Verify the Authentication suite has 3 direct tests (not counting nested suite)
|
||||
const authSuite = result.suites.find(s => s.name === 'Tests.Authentication')
|
||||
expect(authSuite).toBeDefined()
|
||||
const authTests = authSuite!.groups.flatMap(g => g.tests)
|
||||
expect(authTests.length).toBe(3)
|
||||
|
||||
// Verify the Login nested suite has 3 tests
|
||||
const loginSuite = result.suites.find(s => s.name === 'Tests.Authentication.Login')
|
||||
expect(loginSuite).toBeDefined()
|
||||
const loginTests = loginSuite!.groups.flatMap(g => g.tests)
|
||||
expect(loginTests.length).toBe(3)
|
||||
|
||||
// Verify failure is captured
|
||||
const failedTest = authTests.find(t => t.name === 'testCase9')
|
||||
expect(failedTest).toBeDefined()
|
||||
expect(failedTest!.result).toBe('failed')
|
||||
expect(failedTest!.error).toBeDefined()
|
||||
expect(failedTest!.error!.message).toBe('AssertionError: Assertion error message')
|
||||
})
|
||||
|
||||
it('parses phpcheckstyle-phpunit.xml with deeply nested suites', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'phpcheckstyle-phpunit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Verify test counts from the XML: tests="30", failures="2"
|
||||
expect(result.tests).toBe(30)
|
||||
expect(result.passed).toBe(28)
|
||||
expect(result.failed).toBe(2)
|
||||
expect(result.result).toBe('failed')
|
||||
|
||||
// Verify the number of test suites extracted (leaf suites with testcases)
|
||||
// CommentsTest, DeprecationTest, GoodTest, IndentationTest, MetricsTest,
|
||||
// NamingTest, OptimizationTest, OtherTest, PHPTagsTest, ProhibitedTest,
|
||||
// StrictCompareTest, UnusedTest = 12 suites
|
||||
expect(result.suites.length).toBe(12)
|
||||
|
||||
const suiteNames = result.suites.map(s => s.name)
|
||||
expect(suiteNames).toContain('CommentsTest')
|
||||
expect(suiteNames).toContain('GoodTest')
|
||||
expect(suiteNames).toContain('IndentationTest')
|
||||
expect(suiteNames).toContain('OtherTest')
|
||||
})
|
||||
|
||||
it('extracts test data from phpcheckstyle-phpunit.xml', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'phpcheckstyle-phpunit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Find the CommentsTest suite
|
||||
const commentsSuite = result.suites.find(s => s.name === 'CommentsTest')
|
||||
expect(commentsSuite).toBeDefined()
|
||||
|
||||
// Verify tests are extracted correctly
|
||||
const tests = commentsSuite!.groups.flatMap(g => g.tests)
|
||||
expect(tests.length).toBe(3)
|
||||
|
||||
const testGoodDoc = tests.find(t => t.name === 'testGoodDoc')
|
||||
expect(testGoodDoc).toBeDefined()
|
||||
expect(testGoodDoc!.result).toBe('success')
|
||||
})
|
||||
|
||||
it('captures failure details from phpcheckstyle-phpunit.xml', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'phpcheckstyle-phpunit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Find the OtherTest suite which has failures
|
||||
const otherSuite = result.suites.find(s => s.name === 'OtherTest')
|
||||
expect(otherSuite).toBeDefined()
|
||||
|
||||
const failedTests = otherSuite!.groups.flatMap(g => g.tests).filter(t => t.result === 'failed')
|
||||
expect(failedTests.length).toBe(2)
|
||||
|
||||
// Verify failure details
|
||||
const testOther = failedTests.find(t => t.name === 'testOther')
|
||||
expect(testOther).toBeDefined()
|
||||
expect(testOther!.error).toBeDefined()
|
||||
expect(testOther!.error!.details).toContain('We expect 20 warnings')
|
||||
expect(testOther!.error!.details).toContain('Failed asserting that 19 matches expected 20')
|
||||
|
||||
const testException = failedTests.find(t => t.name === 'testException')
|
||||
expect(testException).toBeDefined()
|
||||
expect(testException!.error).toBeDefined()
|
||||
expect(testException!.error!.details).toContain('We expect 1 error')
|
||||
})
|
||||
|
||||
it('report from junit-basic.xml matches snapshot', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'junit-basic.xml')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'phpunit-junit-basic-results.md')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('report from phpcheckstyle-phpunit.xml matches snapshot', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'phpcheckstyle-phpunit.xml')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'phpunit-phpcheckstyle-results.md')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
})
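The 'maps absolute paths to tracked files for annotations' test above exercises two mechanisms of the new parser: the file/line attributes that PHPUnit puts on <testcase>, and the file.php:123 / file.php(123) patterns it prints in failure output. A sketch of both, using a hypothetical fixture (the real phpunit-paths.xml is not part of this diff) and a hypothetical runner path:

import {PhpunitJunitParser} from '../src/parsers/phpunit-junit/phpunit-junit-parser'
import {ParseOptions} from '../src/test-parser'

// Absolute paths as PHPUnit would emit them on a CI runner (hypothetical values)
const xml = `<?xml version="1.0" encoding="UTF-8"?>
<testsuite name="SampleSuite" tests="2" failures="2" time="0.1">
  <testcase name="testFailure" classname="SampleSuite" file="/home/runner/work/repo/src/Fake.php" line="42" time="0.01">
    <failure type="AssertionError">boom</failure>
  </testcase>
  <testcase name="testParenFailure" classname="SampleSuite" time="0.01">
    <failure type="AssertionError">boom
at /home/runner/work/repo/src/Paren.php(123)</failure>
  </testcase>
</testsuite>`

const opts: ParseOptions = {
  parseErrors: true,
  // Only paths that map onto a tracked file are attached to the error,
  // which is what lets the action place annotations on the right source lines
  trackedFiles: ['src/Fake.php', 'src/Paren.php']
}

async function demo(): Promise<void> {
  const result = await new PhpunitJunitParser(opts).parse('phpunit-paths.xml', xml)
  const tests = result.suites[0].groups.flatMap(g => g.tests)
  // testFailure: path and line come straight from the testcase attributes -> src/Fake.php:42
  // testParenFailure: path and line are recovered from the stack-trace line -> src/Paren.php:123
  console.log(tests.map(t => [t.name, t.error?.path, t.error?.line]))
}

demo()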
@@ -1,224 +0,0 @@
|
||||
import * as fs from 'fs'
|
||||
import * as path from 'path'
|
||||
|
||||
import {NetteTesterJunitParser} from '../src/parsers/tester-junit/tester-junit-parser'
|
||||
import {ParseOptions} from '../src/test-parser'
|
||||
import {getReport} from '../src/report/get-report'
|
||||
import {normalizeFilePath} from '../src/utils/path-utils'
|
||||
|
||||
describe('tester-junit tests', () => {
|
||||
it('produces empty test run result when there are no test cases', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'empty', 'phpunit-empty.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result.tests).toBe(0)
|
||||
expect(result.result).toBe('success')
|
||||
})
|
||||
|
||||
it('report from tester-v1.7-report.xml matches snapshot', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'tester-v1.7-report.xml')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'tester-v1.7-test-results.md')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('parses tester-v1.7-report.xml correctly', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'tester-v1.7-report.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Verify test counts from XML: tests="65" errors="1" skipped="3"
|
||||
expect(result.tests).toBe(65)
|
||||
expect(result.failed).toBe(1)
|
||||
expect(result.skipped).toBe(3)
|
||||
expect(result.passed).toBe(61)
|
||||
|
||||
// Verify suite name uses file name
|
||||
expect(result.suites.length).toBe(1)
|
||||
expect(result.suites[0].name).toBe('tester-v1.7-report.xml')
|
||||
})
|
||||
|
||||
it('groups tests by directory structure', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'tester-v1.7-report.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Get all group names
|
||||
const groupNames = result.suites[0].groups.map(g => g.name)
|
||||
|
||||
// Verify expected directory groups exist
|
||||
expect(groupNames).toContain('tests/Framework')
|
||||
expect(groupNames).toContain('tests/CodeCoverage')
|
||||
expect(groupNames).toContain('tests/Runner')
|
||||
expect(groupNames).toContain('tests/RunnerOutput')
|
||||
})
|
||||
|
||||
it('parses test names with method suffixes correctly', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'tester-v1.7-report.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Find the Framework group which has tests with method suffixes
|
||||
const frameworkGroup = result.suites[0].groups.find(g => g.name === 'tests/Framework')
|
||||
expect(frameworkGroup).toBeDefined()
|
||||
|
||||
// Find tests with method suffixes
|
||||
const testWithMethod = frameworkGroup!.tests.find(t => t.name.includes('::testSimple'))
|
||||
expect(testWithMethod).toBeDefined()
|
||||
expect(testWithMethod!.name).toBe('Assert.equal.recursive.phpt::testSimple')
|
||||
})
|
||||
|
||||
it('parses complex test names from BootstrapFormRenderer-report.xml', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'BootstrapFormRenderer-report.xml')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'tester-bootstrap-test-results.md')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Verify test counts: 4 tests, all passed
|
||||
expect(result.tests).toBe(4)
|
||||
expect(result.passed).toBe(4)
|
||||
expect(result.failed).toBe(0)
|
||||
expect(result.skipped).toBe(0)
|
||||
|
||||
// Verify suite name
|
||||
expect(result.suites[0].name).toBe('BootstrapFormRenderer-report.xml')
|
||||
|
||||
// All tests should have method names
|
||||
const allTests = result.suites[0].groups.flatMap(g => g.tests)
|
||||
expect(allTests.every(t => t.name.includes('::'))).toBe(true)
|
||||
expect(allTests.some(t => t.name.includes('::testRenderingBasics'))).toBe(true)
|
||||
expect(allTests.some(t => t.name.includes('::testRenderingIndividual'))).toBe(true)
|
||||
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('extracts error details from failures', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'tester-v1.7-report.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Find the failed test
|
||||
const allTests = result.suites[0].groups.flatMap(g => g.tests)
|
||||
const failedTests = allTests.filter(t => t.result === 'failed')
|
||||
|
||||
expect(failedTests.length).toBe(1)
|
||||
|
||||
// Verify error details are captured
|
||||
const failedTest = failedTests[0]
|
||||
expect(failedTest.error).toBeDefined()
|
||||
expect(failedTest.error!.details).toContain('Failed:')
|
||||
expect(failedTest.error!.details).toContain('multiple-fails')
|
||||
})
|
||||
|
||||
it('correctly identifies skipped tests', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'tester-v1.7-report.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Find skipped tests
|
||||
const allTests = result.suites[0].groups.flatMap(g => g.tests)
|
||||
const skippedTests = allTests.filter(t => t.result === 'skipped')
|
||||
|
||||
expect(skippedTests.length).toBe(3)
|
||||
|
||||
// Verify some known skipped tests
|
||||
expect(skippedTests.some(t => t.name.includes('Dumper.toPhp.php7.phpt'))).toBe(true)
|
||||
expect(skippedTests.some(t => t.name.includes('Collector.start.phpt'))).toBe(true)
|
||||
})
|
||||
|
||||
it('parses test with description prefix correctly', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'tester-v1.7-report.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Find test with description prefix
|
||||
const allTests = result.suites[0].groups.flatMap(g => g.tests)
|
||||
// The test name is generated from the basename, and the description is shown in parentheses
|
||||
const testWithDescription = allTests.find(t => t.name.includes('Prevent loop'))
|
||||
|
||||
expect(testWithDescription).toBeDefined()
|
||||
expect(testWithDescription!.name).toContain('Prevent loop')
|
||||
expect(testWithDescription!.name).toContain('TestCase.ownErrorHandler.phpt')
|
||||
})
|
||||
})
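Unlike a PHPUnit report, a Nette Tester report is one flat <testsuite> whose test cases carry repository paths in their classname attribute, so the parser names the suite after the report file and groups cases by directory. A small sketch with an illustrative report (not one of the fixtures referenced above):

import {NetteTesterJunitParser} from '../src/parsers/tester-junit/tester-junit-parser'
import {ParseOptions} from '../src/test-parser'

const xml = `<?xml version="1.0" encoding="UTF-8"?>
<testsuite name="Nette Tester" tests="3" failures="1" time="1.5">
  <testcase classname="tests/Framework/Assert.equal.phpt" name="Assert.equal.phpt" time="0.2"/>
  <testcase classname="tests/Framework/Assert.same.phpt" name="Assert.same.phpt" time="0.2">
    <failure message="Failed: 1 should be 2 in tests/Framework/Assert.same.phpt:10"/>
  </testcase>
  <testcase classname="tests/Runner/Runner.basic.phpt" name="Runner.basic.phpt" time="0.3"/>
</testsuite>`

const opts: ParseOptions = {parseErrors: true, trackedFiles: ['tests/Framework/Assert.same.phpt']}

async function demo(): Promise<void> {
  const result = await new NetteTesterJunitParser(opts).parse('tester-report.xml', xml)
  console.log(result.suites[0].name)                    // 'tester-report.xml', named after the report file
  console.log(result.suites[0].groups.map(g => g.name)) // ['tests/Framework', 'tests/Runner']
  const failed = result.suites[0].groups.flatMap(g => g.tests).find(t => t.result === 'failed')
  console.log(failed?.error?.path, failed?.error?.line) // 'tests/Framework/Assert.same.phpt' 10
}

demo()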
@@ -32,8 +32,6 @@ inputs:
- java-junit
- jest-junit
- mocha-json
- tester-junit
- phpunit-junit
- python-xunit
- rspec-json
- swift-xunit

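The two reporter values added to this list, tester-junit and phpunit-junit, are wired to the new parser classes in the getParser switch that appears in the dist/index.js diff below. A minimal sketch of just that mapping; the TestParser type is assumed to be exported from src/test-parser alongside ParseOptions, and getNewParser is a hypothetical helper, not code from this branch:

import {ParseOptions, TestParser} from '../src/test-parser'
import {PhpunitJunitParser} from '../src/parsers/phpunit-junit/phpunit-junit-parser'
import {NetteTesterJunitParser} from '../src/parsers/tester-junit/tester-junit-parser'

// Sketch of how the two new reporter values resolve to parsers; the real
// factory also covers the existing reporters listed above
function getNewParser(reporter: string, options: ParseOptions): TestParser {
  switch (reporter) {
    case 'phpunit-junit':
      return new PhpunitJunitParser(options)
    case 'tester-junit':
      return new NetteTesterJunitParser(options)
    default:
      throw new Error(`Input variable 'reporter' is set to invalid value '${reporter}'`)
  }
}

console.log(getNewParser('phpunit-junit', {parseErrors: true, trackedFiles: []}))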
519 dist/index.js (generated) (vendored)
@@ -277,11 +277,9 @@ const golang_json_parser_1 = __nccwpck_require__(5162);
|
||||
const java_junit_parser_1 = __nccwpck_require__(8342);
|
||||
const jest_junit_parser_1 = __nccwpck_require__(1042);
|
||||
const mocha_json_parser_1 = __nccwpck_require__(5402);
|
||||
const phpunit_junit_parser_1 = __nccwpck_require__(2674);
|
||||
const python_xunit_parser_1 = __nccwpck_require__(6578);
|
||||
const rspec_json_parser_1 = __nccwpck_require__(9768);
|
||||
const swift_xunit_parser_1 = __nccwpck_require__(7330);
|
||||
const tester_junit_parser_1 = __nccwpck_require__(7816);
|
||||
const path_utils_1 = __nccwpck_require__(9132);
|
||||
const github_utils_1 = __nccwpck_require__(6667);
|
||||
async function main() {
|
||||
@@ -496,16 +494,12 @@ class TestReporter {
|
||||
return new jest_junit_parser_1.JestJunitParser(options);
|
||||
case 'mocha-json':
|
||||
return new mocha_json_parser_1.MochaJsonParser(options);
|
||||
case 'phpunit-junit':
|
||||
return new phpunit_junit_parser_1.PhpunitJunitParser(options);
|
||||
case 'python-xunit':
|
||||
return new python_xunit_parser_1.PythonXunitParser(options);
|
||||
case 'rspec-json':
|
||||
return new rspec_json_parser_1.RspecJsonParser(options);
|
||||
case 'swift-xunit':
|
||||
return new swift_xunit_parser_1.SwiftXunitParser(options);
|
||||
case 'tester-junit':
|
||||
return new tester_junit_parser_1.NetteTesterJunitParser(options);
|
||||
default:
|
||||
throw new Error(`Input variable 'reporter' is set to invalid value '${reporter}'`);
|
||||
}
|
||||
@@ -732,12 +726,12 @@ class DartJsonParser {
|
||||
getRelativePath(path) {
|
||||
const prefix = 'file://';
|
||||
if (path.startsWith(prefix)) {
|
||||
path = path.substring(prefix.length);
|
||||
path = path.substr(prefix.length);
|
||||
}
|
||||
path = (0, path_utils_1.normalizeFilePath)(path);
|
||||
const workDir = this.getWorkDir(path);
|
||||
if (workDir !== undefined && path.startsWith(workDir)) {
|
||||
path = path.substring(workDir.length);
|
||||
path = path.substr(workDir.length);
|
||||
}
|
||||
return path;
|
||||
}
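This hunk and several below pair each String.prototype.substring call with its String.prototype.substr counterpart. With a single argument the two are interchangeable, which is all these call sites use; they differ only in how a second argument is interpreted, and substr is the deprecated form. A short illustration with hypothetical values:

const s = '/work/repo/src/Foo.php'
const prefix = '/work/repo/'

// One argument: both drop the prefix and return 'src/Foo.php'
console.log(s.substring(prefix.length))
console.log(s.substr(prefix.length)) // deprecated, same result here

// Two arguments: substring(start, end) vs substr(start, length)
console.log(s.substring(1, 5)) // 'work'
console.log(s.substr(1, 5))    // 'work/'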
@@ -896,7 +890,7 @@ class DotnetNunitParser {
|
||||
path = (0, path_utils_1.normalizeFilePath)(path);
|
||||
const workDir = this.getWorkDir(path);
|
||||
if (workDir !== undefined && path.startsWith(workDir)) {
|
||||
path = path.substring(workDir.length);
|
||||
path = path.substr(workDir.length);
|
||||
}
|
||||
return path;
|
||||
}
|
||||
@@ -1000,7 +994,7 @@ class DotnetTrxParser {
|
||||
const duration = durationAttr ? (0, parse_utils_1.parseNetDuration)(durationAttr) : 0;
|
||||
const resultTestName = r.result.$.testName;
|
||||
const testName = resultTestName.startsWith(className) && resultTestName[className.length] === '.'
|
||||
? resultTestName.substring(className.length + 1)
|
||||
? resultTestName.substr(className.length + 1)
|
||||
: resultTestName;
|
||||
const test = new Test(testName, r.result.$.outcome, duration, error);
|
||||
tc.tests.push(test);
|
||||
@@ -1067,7 +1061,7 @@ class DotnetTrxParser {
|
||||
const filePath = (0, path_utils_1.normalizeFilePath)(fileStr);
|
||||
const workDir = this.getWorkDir(filePath);
|
||||
if (workDir) {
|
||||
const file = filePath.substring(workDir.length);
|
||||
const file = filePath.substr(workDir.length);
|
||||
if (trackedFiles.includes(file)) {
|
||||
const line = parseInt(lineStr);
|
||||
return { path: file, line };
|
||||
@@ -1553,7 +1547,7 @@ class JestJunitParser {
|
||||
path = (0, path_utils_1.normalizeFilePath)(path);
|
||||
const workDir = this.getWorkDir(path);
|
||||
if (workDir !== undefined && path.startsWith(workDir)) {
|
||||
path = path.substring(workDir.length);
|
||||
path = path.substr(workDir.length);
|
||||
}
|
||||
return path;
|
||||
}
|
||||
@@ -1624,7 +1618,7 @@ class MochaJsonParser {
|
||||
}
|
||||
processTest(suite, test, result) {
|
||||
const groupName = test.fullTitle !== test.title
|
||||
? test.fullTitle.substring(0, test.fullTitle.length - test.title.length).trimEnd()
|
||||
? test.fullTitle.substr(0, test.fullTitle.length - test.title.length).trimEnd()
|
||||
: null;
|
||||
let group = suite.groups.find(grp => grp.name === groupName);
|
||||
if (group === undefined) {
|
||||
@@ -1659,7 +1653,7 @@ class MochaJsonParser {
|
||||
path = (0, path_utils_1.normalizeFilePath)(path);
|
||||
const workDir = this.getWorkDir(path);
|
||||
if (workDir !== undefined && path.startsWith(workDir)) {
|
||||
path = path.substring(workDir.length);
|
||||
path = path.substr(workDir.length);
|
||||
}
|
||||
return path;
|
||||
}
|
||||
@@ -1672,241 +1666,6 @@ class MochaJsonParser {
|
||||
exports.MochaJsonParser = MochaJsonParser;
|
||||
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 2674:
|
||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.PhpunitJunitParser = void 0;
|
||||
const xml2js_1 = __nccwpck_require__(758);
|
||||
const path_utils_1 = __nccwpck_require__(9132);
|
||||
const test_results_1 = __nccwpck_require__(613);
|
||||
class PhpunitJunitParser {
|
||||
options;
|
||||
trackedFiles;
|
||||
trackedFilesList;
|
||||
assumedWorkDir;
|
||||
constructor(options) {
|
||||
this.options = options;
|
||||
this.trackedFilesList = options.trackedFiles.map(f => (0, path_utils_1.normalizeFilePath)(f));
|
||||
this.trackedFiles = new Set(this.trackedFilesList);
|
||||
}
|
||||
async parse(filePath, content) {
|
||||
const reportOrSuite = await this.getPhpunitReport(filePath, content);
|
||||
const isReport = reportOrSuite.testsuites !== undefined;
|
||||
// XML might contain:
|
||||
// - multiple suites under <testsuites> root node
|
||||
// - single <testsuite> as root node
|
||||
let report;
|
||||
if (isReport) {
|
||||
report = reportOrSuite;
|
||||
}
|
||||
else {
|
||||
// Make it behave the same way as if suite was inside <testsuites> root node
|
||||
const suite = reportOrSuite.testsuite;
|
||||
report = {
|
||||
testsuites: {
|
||||
$: { time: suite.$.time },
|
||||
testsuite: [suite]
|
||||
}
|
||||
};
|
||||
}
|
||||
return this.getTestRunResult(filePath, report);
|
||||
}
|
||||
async getPhpunitReport(filePath, content) {
|
||||
try {
|
||||
return await (0, xml2js_1.parseStringPromise)(content);
|
||||
}
|
||||
catch (e) {
|
||||
throw new Error(`Invalid XML at ${filePath}\n\n${e}`);
|
||||
}
|
||||
}
|
||||
getTestRunResult(filePath, report) {
|
||||
const suites = [];
|
||||
this.collectSuites(suites, report.testsuites.testsuite ?? []);
|
||||
const seconds = parseFloat(report.testsuites.$?.time ?? '');
|
||||
const time = isNaN(seconds) ? undefined : seconds * 1000;
|
||||
return new test_results_1.TestRunResult(filePath, suites, time);
|
||||
}
|
||||
collectSuites(results, testsuites) {
|
||||
for (const ts of testsuites) {
|
||||
// Recursively process nested test suites first (depth-first)
|
||||
if (ts.testsuite) {
|
||||
this.collectSuites(results, ts.testsuite);
|
||||
}
|
||||
// Only add suites that have direct test cases
|
||||
// This avoids adding container suites that only hold nested suites
|
||||
if (ts.testcase && ts.testcase.length > 0) {
|
||||
const name = ts.$.name.trim();
|
||||
const time = parseFloat(ts.$.time) * 1000;
|
||||
results.push(new test_results_1.TestSuiteResult(name, this.getGroups(ts), time));
|
||||
}
|
||||
}
|
||||
}
|
||||
getGroups(suite) {
|
||||
if (!suite.testcase || suite.testcase.length === 0) {
|
||||
return [];
|
||||
}
|
||||
const groups = [];
|
||||
for (const tc of suite.testcase) {
|
||||
// Use classname (PHPUnit style) for grouping
|
||||
// If classname matches suite name, use empty string to avoid redundancy
|
||||
const className = tc.$.classname ?? tc.$.class ?? '';
|
||||
const groupName = className === suite.$.name ? '' : className;
|
||||
let grp = groups.find(g => g.name === groupName);
|
||||
if (grp === undefined) {
|
||||
grp = { name: groupName, tests: [] };
|
||||
groups.push(grp);
|
||||
}
|
||||
grp.tests.push(tc);
|
||||
}
|
||||
return groups.map(grp => {
|
||||
const tests = grp.tests.map(tc => {
|
||||
const name = tc.$.name.trim();
|
||||
const result = this.getTestCaseResult(tc);
|
||||
const time = parseFloat(tc.$.time) * 1000;
|
||||
const error = this.getTestCaseError(tc);
|
||||
return new test_results_1.TestCaseResult(name, result, time, error);
|
||||
});
|
||||
return new test_results_1.TestGroupResult(grp.name, tests);
|
||||
});
|
||||
}
|
||||
getTestCaseResult(test) {
|
||||
if (test.failure || test.error)
|
||||
return 'failed';
|
||||
if (test.skipped)
|
||||
return 'skipped';
|
||||
return 'success';
|
||||
}
|
||||
getTestCaseError(tc) {
|
||||
if (!this.options.parseErrors) {
|
||||
return undefined;
|
||||
}
|
||||
// We process <error> and <failure> the same way
|
||||
const failures = tc.failure ?? tc.error;
|
||||
if (!failures || failures.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
const failure = failures[0];
|
||||
const details = typeof failure === 'string' ? failure : failure._ ?? '';
|
||||
// PHPUnit provides file path directly in testcase attributes
|
||||
let filePath;
|
||||
let line;
|
||||
if (tc.$.file) {
|
||||
const relativePath = this.getRelativePath(tc.$.file);
|
||||
if (this.trackedFiles.has(relativePath)) {
|
||||
filePath = relativePath;
|
||||
}
|
||||
if (tc.$.line) {
|
||||
line = parseInt(tc.$.line);
|
||||
}
|
||||
}
|
||||
// If file not in tracked files, try to extract from error details
|
||||
if (!filePath && details) {
|
||||
const extracted = this.extractFileAndLine(details);
|
||||
if (extracted) {
|
||||
filePath = extracted.filePath;
|
||||
line = extracted.line;
|
||||
}
|
||||
}
|
||||
let message;
|
||||
if (typeof failure !== 'string' && failure.$) {
|
||||
message = failure.$.message;
|
||||
if (failure.$.type) {
|
||||
message = message ? `${failure.$.type}: ${message}` : failure.$.type;
|
||||
}
|
||||
}
|
||||
return {
|
||||
path: filePath,
|
||||
line,
|
||||
details,
|
||||
message
|
||||
};
|
||||
}
|
||||
extractFileAndLine(details) {
|
||||
// PHPUnit stack traces typically have format: /path/to/file.php:123
|
||||
const lines = details.split(/\r?\n/);
|
||||
for (const str of lines) {
|
||||
// Match patterns like /path/to/file.php:123 or at /path/to/file.php(123)
|
||||
const matchColon = str.match(/((?:[A-Za-z]:)?[^\s:()]+?\.(?:php|phpt)):(\d+)/);
|
||||
if (matchColon) {
|
||||
const relativePath = this.getRelativePath(matchColon[1]);
|
||||
if (this.trackedFiles.has(relativePath)) {
|
||||
return { filePath: relativePath, line: parseInt(matchColon[2]) };
|
||||
}
|
||||
}
|
||||
const matchParen = str.match(/((?:[A-Za-z]:)?[^\s:()]+?\.(?:php|phpt))\((\d+)\)/);
|
||||
if (matchParen) {
|
||||
const relativePath = this.getRelativePath(matchParen[1]);
|
||||
if (this.trackedFiles.has(relativePath)) {
|
||||
return { filePath: relativePath, line: parseInt(matchParen[2]) };
|
||||
}
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
/**
|
||||
* Converts an absolute file path to a relative path by stripping the working directory prefix.
|
||||
*
|
||||
* @param path - The absolute file path from PHPUnit output (e.g., `/home/runner/work/repo/src/Test.php`)
|
||||
* @returns The relative path (e.g., `src/Test.php`) if a working directory can be determined,
|
||||
* otherwise returns the normalized original path
|
||||
*/
|
||||
getRelativePath(path) {
|
||||
path = (0, path_utils_1.normalizeFilePath)(path);
|
||||
const workDir = this.getWorkDir(path);
|
||||
if (workDir !== undefined && path.startsWith(workDir)) {
|
||||
path = path.substring(workDir.length);
|
||||
}
|
||||
return path;
|
||||
}
|
||||
/**
|
||||
* Determines the working directory prefix to strip from absolute file paths.
|
||||
*
|
||||
* The working directory is resolved using the following priority:
|
||||
*
|
||||
* 1. **Explicit configuration** - If `options.workDir` is set, it takes precedence.
|
||||
* This allows users to explicitly specify the working directory.
|
||||
*
|
||||
* 2. **Cached assumption** - If we've previously determined a working directory
|
||||
* (`assumedWorkDir`) and the current path starts with it, we reuse that value.
|
||||
* This avoids redundant computation for subsequent paths.
|
||||
*
|
||||
* 3. **Heuristic detection** - Uses `getBasePath()` to find the common prefix between
|
||||
* the absolute path and the list of tracked files in the repository. For example:
|
||||
* - Absolute path: `/home/runner/work/repo/src/Test.php`
|
||||
* - Tracked file: `src/Test.php`
|
||||
* - Detected workDir: `/home/runner/work/repo/`
|
||||
*
|
||||
* Once detected, the working directory is cached in `assumedWorkDir` for efficiency.
|
||||
*
|
||||
* @param path - The normalized absolute file path to analyze
|
||||
* @returns The working directory prefix (with trailing slash), or `undefined` if it cannot be determined
|
||||
*
|
||||
* @example
|
||||
* // With tracked file 'src/Foo.php' and path '/home/runner/work/repo/src/Foo.php'
|
||||
* // Returns: '/home/runner/work/repo/'
|
||||
*/
|
||||
getWorkDir(path) {
|
||||
if (this.options.workDir) {
|
||||
return this.options.workDir;
|
||||
}
|
||||
if (this.assumedWorkDir && path.startsWith(this.assumedWorkDir)) {
|
||||
return this.assumedWorkDir;
|
||||
}
|
||||
const basePath = (0, path_utils_1.getBasePath)(path, this.trackedFilesList);
|
||||
if (basePath !== undefined) {
|
||||
this.assumedWorkDir = basePath;
|
||||
}
|
||||
return basePath;
|
||||
}
|
||||
}
|
||||
exports.PhpunitJunitParser = PhpunitJunitParser;
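The getWorkDir comment above describes a heuristic: infer the CI working directory by matching the absolute path that PHPUnit reports against the repository's tracked files. The sketch below is a simplified stand-in for that idea (findWorkDir is a hypothetical helper, not the bundled getBasePath, whose tail appears later in this diff); it reproduces the example from the doc comment:

// Find a tracked file that is a suffix of the absolute path and strip everything before it
function findWorkDir(absPath: string, trackedFiles: string[]): string | undefined {
  let longest = ''
  for (const tracked of trackedFiles) {
    if (absPath.endsWith(tracked) && tracked.length > longest.length) {
      longest = tracked
    }
  }
  return longest === '' ? undefined : absPath.substring(0, absPath.length - longest.length)
}

// Matches the example above: with tracked file 'src/Foo.php' the detected
// work dir is '/home/runner/work/repo/'
console.log(findWorkDir('/home/runner/work/repo/src/Foo.php', ['src/Foo.php', 'README.md']))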
|
||||
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 6578:
|
||||
@@ -1980,7 +1739,7 @@ class RspecJsonParser {
|
||||
}
|
||||
processTest(suite, test, result) {
|
||||
const groupName = test.full_description !== test.description
|
||||
? test.full_description.substring(0, test.full_description.length - test.description.length).trimEnd()
|
||||
? test.full_description.substr(0, test.full_description.length - test.description.length).trimEnd()
|
||||
: null;
|
||||
let group = suite.groups.find(grp => grp.name === groupName);
|
||||
if (group === undefined) {
|
||||
@@ -2050,262 +1809,6 @@ class SwiftXunitParser extends java_junit_parser_1.JavaJunitParser {
|
||||
exports.SwiftXunitParser = SwiftXunitParser;
|
||||
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 7816:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.NetteTesterJunitParser = void 0;
|
||||
const path = __importStar(__nccwpck_require__(6928));
|
||||
const xml2js_1 = __nccwpck_require__(758);
|
||||
const path_utils_1 = __nccwpck_require__(9132);
|
||||
const test_results_1 = __nccwpck_require__(613);
|
||||
class NetteTesterJunitParser {
|
||||
options;
|
||||
trackedFiles;
|
||||
trackedFilesList;
|
||||
constructor(options) {
|
||||
this.options = options;
|
||||
this.trackedFilesList = options.trackedFiles.map(f => (0, path_utils_1.normalizeFilePath)(f));
|
||||
this.trackedFiles = new Set(this.trackedFilesList);
|
||||
}
|
||||
async parse(filePath, content) {
|
||||
const reportOrSuite = await this.getNetteTesterReport(filePath, content);
|
||||
const isReport = reportOrSuite.testsuites !== undefined;
|
||||
// XML might contain:
|
||||
// - multiple suites under <testsuites> root node
|
||||
// - single <testsuite> as root node
|
||||
let report;
|
||||
if (isReport) {
|
||||
report = reportOrSuite;
|
||||
}
|
||||
else {
|
||||
// Make it behave the same way as if suite was inside <testsuites> root node
|
||||
const suite = reportOrSuite.testsuite;
|
||||
report = {
|
||||
testsuites: {
|
||||
$: { time: suite.$.time },
|
||||
testsuite: [suite]
|
||||
}
|
||||
};
|
||||
}
|
||||
return this.getTestRunResult(filePath, report);
|
||||
}
|
||||
async getNetteTesterReport(filePath, content) {
|
||||
try {
|
||||
return await (0, xml2js_1.parseStringPromise)(content);
|
||||
}
|
||||
catch (e) {
|
||||
throw new Error(`Invalid XML at ${filePath}\n\n${e}`);
|
||||
}
|
||||
}
|
||||
getTestRunResult(filePath, report) {
|
||||
const suites = report.testsuites.testsuite === undefined
|
||||
? []
|
||||
: report.testsuites.testsuite.map((ts, index) => {
|
||||
// Use report file name as suite name (user preference)
|
||||
const fileName = path.basename(filePath);
|
||||
// If there are multiple test suites, add index to distinguish them
|
||||
const name = report.testsuites.testsuite && report.testsuites.testsuite.length > 1
|
||||
? `${fileName} #${index + 1}`
|
||||
: fileName;
|
||||
const time = parseFloat(ts.$.time) * 1000;
|
||||
const sr = new test_results_1.TestSuiteResult(name, this.getGroups(ts), time);
|
||||
return sr;
|
||||
});
|
||||
const seconds = parseFloat(report.testsuites.$?.time ?? '');
|
||||
const time = isNaN(seconds) ? undefined : seconds * 1000;
|
||||
return new test_results_1.TestRunResult(filePath, suites, time);
|
||||
}
|
||||
getGroups(suite) {
|
||||
if (!suite.testcase || suite.testcase.length === 0) {
|
||||
return [];
|
||||
}
|
||||
// Group tests by directory structure
|
||||
const groups = new Map();
|
||||
for (const tc of suite.testcase) {
|
||||
const parsed = this.parseTestCaseName(tc.$.classname);
|
||||
const directory = path.dirname(parsed.filePath);
|
||||
if (!groups.has(directory)) {
|
||||
groups.set(directory, []);
|
||||
}
|
||||
groups.get(directory).push(tc);
|
||||
}
|
||||
return Array.from(groups.entries()).map(([dir, tests]) => {
|
||||
const testResults = tests.map(tc => {
|
||||
const parsed = this.parseTestCaseName(tc.$.classname);
|
||||
const result = this.getTestCaseResult(tc);
|
||||
const time = parseFloat(tc.$.time || '0') * 1000;
|
||||
const error = this.getTestCaseError(tc, parsed.filePath);
|
||||
return new test_results_1.TestCaseResult(parsed.displayName, result, time, error);
|
||||
});
|
||||
return new test_results_1.TestGroupResult(dir, testResults);
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Parse test case name from classname attribute.
|
||||
*
|
||||
* Handles multiple patterns:
|
||||
* 1. Simple: "tests/Framework/Assert.equal.phpt"
|
||||
* 2. With method: "tests/Framework/Assert.equal.recursive.phpt [method=testSimple]"
|
||||
* 3. With description: "Prevent loop in error handling. The #268 regression. | tests/Framework/TestCase.ownErrorHandler.phpt"
|
||||
* 4. With class and method: "Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testRenderingBasics]"
|
||||
*/
|
||||
parseTestCaseName(classname) {
|
||||
let filePath = classname;
|
||||
let method;
|
||||
let description;
|
||||
let className;
|
||||
// Pattern: "Description | filepath [method=methodName]"
|
||||
// or "ClassName | filepath [method=methodName]"
|
||||
const pipePattern = /^(.+?)\s*\|\s*(.+?)(?:\s*\[method=(.+?)\])?$/;
|
||||
const pipeMatch = classname.match(pipePattern);
|
||||
if (pipeMatch) {
|
||||
const prefix = pipeMatch[1].trim();
|
||||
filePath = pipeMatch[2].trim();
|
||||
method = pipeMatch[3];
|
||||
// Check if prefix looks like a class name (contains backslash AND ends with dot)
|
||||
// Examples: "Kdyby\BootstrapFormRenderer\BootstrapRenderer."
|
||||
// vs description: "Prevent loop in error handling. The #268 regression."
|
||||
if (prefix.includes('\\') && prefix.endsWith('.')) {
|
||||
className = prefix;
|
||||
}
|
||||
else {
|
||||
description = prefix;
|
||||
}
|
||||
}
|
||||
else {
|
||||
// Pattern: "filepath [method=methodName]"
|
||||
const methodPattern = /^(.+?)\s*\[method=(.+?)\]$/;
|
||||
const methodMatch = classname.match(methodPattern);
|
||||
if (methodMatch) {
|
||||
filePath = methodMatch[1].trim();
|
||||
method = methodMatch[2].trim();
|
||||
}
|
||||
}
|
||||
// Generate display name
|
||||
const baseName = path.basename(filePath);
|
||||
let displayName = baseName;
|
||||
if (method) {
|
||||
displayName = `${baseName}::${method}`;
|
||||
}
|
||||
if (description) {
|
||||
displayName = `${description} (${baseName})`;
|
||||
}
|
||||
else if (className && method) {
|
||||
// For class names, keep them but still show the file
|
||||
displayName = `${baseName}::${method}`;
|
||||
}
|
||||
return { filePath, method, description, className, displayName };
|
||||
}
|
||||
getTestCaseResult(test) {
|
||||
if (test.failure || test.error)
|
||||
return 'failed';
|
||||
if (test.skipped)
|
||||
return 'skipped';
|
||||
return 'success';
|
||||
}
|
||||
getTestCaseError(tc, filePath) {
|
||||
if (!this.options.parseErrors) {
|
||||
return undefined;
|
||||
}
|
||||
// We process <error> and <failure> the same way
|
||||
const failures = tc.failure ?? tc.error;
|
||||
if (!failures || failures.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
const failure = failures[0];
|
||||
// For Nette Tester, details are in the message attribute, not as inner text
|
||||
const details = typeof failure === 'string' ? failure : failure._ ?? failure.$?.message ?? '';
|
||||
// Try to extract file path and line from error details
|
||||
let errorFilePath;
|
||||
let line;
|
||||
if (details) {
|
||||
const extracted = this.extractFileAndLine(details);
|
||||
if (extracted) {
|
||||
errorFilePath = extracted.filePath;
|
||||
line = extracted.line;
|
||||
}
|
||||
}
|
||||
// Fallback: use test file path if tracked
|
||||
if (!errorFilePath) {
|
||||
const normalized = (0, path_utils_1.normalizeFilePath)(filePath);
|
||||
if (this.trackedFiles.has(normalized)) {
|
||||
errorFilePath = normalized;
|
||||
}
|
||||
}
|
||||
let message;
|
||||
if (typeof failure !== 'string' && failure.$) {
|
||||
message = failure.$.message;
|
||||
if (failure.$.type) {
|
||||
message = message ? `${failure.$.type}: ${message}` : failure.$.type;
|
||||
}
|
||||
}
|
||||
return {
|
||||
path: errorFilePath,
|
||||
line,
|
||||
details,
|
||||
message
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Extract file path and line number from error details.
|
||||
* Matches patterns like: /path/to/file.phpt:123 or /path/to/file.php:456
|
||||
*/
|
||||
extractFileAndLine(details) {
|
||||
const lines = details.split(/\r?\n/);
|
||||
for (const str of lines) {
|
||||
// Match PHP file patterns: /path/to/file.phpt:123 or /path/to/file.php:456
|
||||
const match = str.match(/((?:[A-Za-z]:)?[^\s:()]+?\.(?:php|phpt)):(\d+)/);
|
||||
if (match) {
|
||||
const normalized = (0, path_utils_1.normalizeFilePath)(match[1]);
|
||||
if (this.trackedFiles.has(normalized)) {
|
||||
return { filePath: normalized, line: parseInt(match[2]) };
|
||||
}
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
exports.NetteTesterJunitParser = NetteTesterJunitParser;
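The four classname shapes documented on parseTestCaseName above reduce to the following display names, consistent with the deleted tester-junit tests earlier in this diff:

// classname -> display name, following the code paths of parseTestCaseName above
const examples: Array<{classname: string; displayName: string}> = [
  { // 1. plain file path
    classname: 'tests/Framework/Assert.equal.phpt',
    displayName: 'Assert.equal.phpt'
  },
  { // 2. file path with [method=...]
    classname: 'tests/Framework/Assert.equal.recursive.phpt [method=testSimple]',
    displayName: 'Assert.equal.recursive.phpt::testSimple'
  },
  { // 3. free-text description before the pipe
    classname: 'Prevent loop in error handling. The #268 regression. | tests/Framework/TestCase.ownErrorHandler.phpt',
    displayName: 'Prevent loop in error handling. The #268 regression. (TestCase.ownErrorHandler.phpt)'
  },
  { // 4. class name before the pipe plus [method=...]
    classname: 'Kdyby\\BootstrapFormRenderer\\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testRenderingBasics]',
    displayName: 'BootstrapRendererTest.phpt::testRenderingBasics'
  }
]

console.log(examples)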
|
||||
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 4400:
|
||||
@@ -3082,7 +2585,7 @@ function ellipsis(text, maxLength) {
|
||||
if (text.length <= maxLength) {
|
||||
return text;
|
||||
}
|
||||
return text.substring(0, maxLength - 3) + '...';
|
||||
return text.substr(0, maxLength - 3) + '...';
|
||||
}
|
||||
function formatTime(ms) {
|
||||
if (ms > 1000) {
|
||||
@@ -3201,7 +2704,7 @@ function getBasePath(path, trackedFiles) {
|
||||
if (max === '') {
|
||||
return undefined;
|
||||
}
|
||||
const base = path.substring(0, path.length - max.length);
|
||||
const base = path.substr(0, path.length - max.length);
|
||||
return base;
|
||||
}
|
||||
|
||||
|
||||
14 package-lock.json (generated)
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "test-reporter",
|
||||
"version": "2.5.0",
|
||||
"version": "2.3.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "test-reporter",
|
||||
"version": "2.5.0",
|
||||
"version": "2.3.0",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.11.1",
|
||||
@@ -146,7 +146,6 @@
|
||||
"integrity": "sha512-BU2f9tlKQ5CAthiMIgpzAh4eDTLWo1mqi9jqE2OxMG0E/OM199VJt2q8BztTxpnSW0i1ymdwLXRJnYzvDM5r2w==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@ampproject/remapping": "^2.2.0",
|
||||
"@babel/code-frame": "^7.27.1",
|
||||
@@ -1516,7 +1515,6 @@
|
||||
"resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.1.tgz",
|
||||
"integrity": "sha512-dKYCMuPO1bmrpuogcjQ8z7ICCH3FP6WmxpwC03yjzGfZhj9fTJg6+bS1+UAplekbN2C+M61UNllGOOoAfGCrdQ==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@octokit/auth-token": "^4.0.0",
|
||||
"@octokit/graphql": "^7.1.0",
|
||||
@@ -2444,7 +2442,6 @@
|
||||
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.2.tgz",
|
||||
"integrity": "sha512-nc0Axzp/0FILLEVsm4fNwLCwMttvhEI263QtVPQcbpfZZ3ts0hLsZGOpE6czNlid7CJ9MlyH8reXkpsf3YUY4w==",
|
||||
"dev": true,
|
||||
"peer": true,
|
||||
"bin": {
|
||||
"acorn": "bin/acorn"
|
||||
},
|
||||
@@ -2914,7 +2911,6 @@
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"caniuse-lite": "^1.0.30001726",
|
||||
"electron-to-chromium": "^1.5.173",
|
||||
@@ -3759,7 +3755,6 @@
|
||||
"deprecated": "This version is no longer supported. Please see https://eslint.org/version-support for other options.",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@eslint-community/eslint-utils": "^4.2.0",
|
||||
"@eslint-community/regexpp": "^4.6.1",
|
||||
@@ -4006,7 +4001,6 @@
|
||||
"integrity": "sha512-whOE1HFo/qJDyX4SnXzP4N6zOWn79WhnCUY/iDR0mPfQZO8wcYE4JClzI2oZrhBnnMUCBCHZhO6VQyoBU95mZA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@rtsao/scc": "^1.1.0",
|
||||
"array-includes": "^3.1.9",
|
||||
@@ -5641,7 +5635,6 @@
|
||||
"integrity": "sha512-F26gjC0yWN8uAA5m5Ss8ZQf5nDHWGlN/xWZIh8S5SRbsEKBovwZhxGd6LJlbZYxBgCYOtreSUyb8hpXyGC5O4A==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@jest/core": "30.2.0",
|
||||
"@jest/types": "30.2.0",
|
||||
@@ -7117,7 +7110,6 @@
|
||||
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz",
|
||||
"integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
@@ -7229,7 +7221,6 @@
|
||||
"integrity": "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"bin": {
|
||||
"prettier": "bin/prettier.cjs"
|
||||
},
|
||||
@@ -8366,7 +8357,6 @@
|
||||
"integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"peer": true,
|
||||
"bin": {
|
||||
"tsc": "bin/tsc",
|
||||
"tsserver": "bin/tsserver"
|
||||
@@ -1,6 +1,6 @@
{
"name": "test-reporter",
"version": "2.5.0",
"version": "2.3.0",
"private": true,
"description": "Presents test results from popular testing frameworks as Github check run",
"main": "lib/main.js",
@@ -17,11 +17,9 @@ import {GolangJsonParser} from './parsers/golang-json/golang-json-parser'
import {JavaJunitParser} from './parsers/java-junit/java-junit-parser'
import {JestJunitParser} from './parsers/jest-junit/jest-junit-parser'
import {MochaJsonParser} from './parsers/mocha-json/mocha-json-parser'
import {PhpunitJunitParser} from './parsers/phpunit-junit/phpunit-junit-parser'
import {PythonXunitParser} from './parsers/python-xunit/python-xunit-parser'
import {RspecJsonParser} from './parsers/rspec-json/rspec-json-parser'
import {SwiftXunitParser} from './parsers/swift-xunit/swift-xunit-parser'
import {NetteTesterJunitParser} from './parsers/tester-junit/tester-junit-parser'
import {normalizeDirPath, normalizeFilePath} from './utils/path-utils'
import {getCheckRunContext} from './utils/github-utils'

@@ -273,16 +271,12 @@ class TestReporter {
return new JestJunitParser(options)
case 'mocha-json':
return new MochaJsonParser(options)
case 'phpunit-junit':
return new PhpunitJunitParser(options)
case 'python-xunit':
return new PythonXunitParser(options)
case 'rspec-json':
return new RspecJsonParser(options)
case 'swift-xunit':
return new SwiftXunitParser(options)
case 'tester-junit':
return new NetteTesterJunitParser(options)
default:
throw new Error(`Input variable 'reporter' is set to invalid value '${reporter}'`)
}
@@ -242,13 +242,13 @@ export class DartJsonParser implements TestParser {
|
||||
private getRelativePath(path: string): string {
|
||||
const prefix = 'file://'
|
||||
if (path.startsWith(prefix)) {
|
||||
path = path.substring(prefix.length)
|
||||
path = path.substr(prefix.length)
|
||||
}
|
||||
|
||||
path = normalizeFilePath(path)
|
||||
const workDir = this.getWorkDir(path)
|
||||
if (workDir !== undefined && path.startsWith(workDir)) {
|
||||
path = path.substring(workDir.length)
|
||||
path = path.substr(workDir.length)
|
||||
}
|
||||
return path
|
||||
}
|
||||
|
||||
@@ -136,7 +136,7 @@ export class DotnetNunitParser implements TestParser {
|
||||
path = normalizeFilePath(path)
|
||||
const workDir = this.getWorkDir(path)
|
||||
if (workDir !== undefined && path.startsWith(workDir)) {
|
||||
path = path.substring(workDir.length)
|
||||
path = path.substr(workDir.length)
|
||||
}
|
||||
return path
|
||||
}
|
||||
|
||||
@@ -94,7 +94,7 @@ export class DotnetTrxParser implements TestParser {
|
||||
const resultTestName = r.result.$.testName
|
||||
const testName =
|
||||
resultTestName.startsWith(className) && resultTestName[className.length] === '.'
|
||||
? resultTestName.substring(className.length + 1)
|
||||
? resultTestName.substr(className.length + 1)
|
||||
: resultTestName
|
||||
|
||||
const test = new Test(testName, r.result.$.outcome, duration, error)
|
||||
@@ -177,7 +177,7 @@ export class DotnetTrxParser implements TestParser {
|
||||
const filePath = normalizeFilePath(fileStr)
|
||||
const workDir = this.getWorkDir(filePath)
|
||||
if (workDir) {
|
||||
const file = filePath.substring(workDir.length)
|
||||
const file = filePath.substr(workDir.length)
|
||||
if (trackedFiles.includes(file)) {
|
||||
const line = parseInt(lineStr)
|
||||
return {path: file, line}
|
||||
|
||||
@@ -106,7 +106,7 @@ export class JestJunitParser implements TestParser {
|
||||
path = normalizeFilePath(path)
|
||||
const workDir = this.getWorkDir(path)
|
||||
if (workDir !== undefined && path.startsWith(workDir)) {
|
||||
path = path.substring(workDir.length)
|
||||
path = path.substr(workDir.length)
|
||||
}
|
||||
return path
|
||||
}
|
||||
|
||||
@@ -61,7 +61,7 @@ export class MochaJsonParser implements TestParser {
|
||||
private processTest(suite: TestSuiteResult, test: MochaJsonTest, result: TestExecutionResult): void {
|
||||
const groupName =
|
||||
test.fullTitle !== test.title
|
||||
? test.fullTitle.substring(0, test.fullTitle.length - test.title.length).trimEnd()
|
||||
? test.fullTitle.substr(0, test.fullTitle.length - test.title.length).trimEnd()
|
||||
: null
|
||||
|
||||
let group = suite.groups.find(grp => grp.name === groupName)
|
||||
@@ -103,7 +103,7 @@ export class MochaJsonParser implements TestParser {
|
||||
path = normalizeFilePath(path)
|
||||
const workDir = this.getWorkDir(path)
|
||||
if (workDir !== undefined && path.startsWith(workDir)) {
|
||||
path = path.substring(workDir.length)
|
||||
path = path.substr(workDir.length)
|
||||
}
|
||||
return path
|
||||
}
|
||||
|
||||
@@ -1,258 +0,0 @@
|
||||
import {ParseOptions, TestParser} from '../../test-parser'
|
||||
import {parseStringPromise} from 'xml2js'
|
||||
|
||||
import {PhpunitReport, SingleSuiteReport, TestCase, TestSuite} from './phpunit-junit-types'
|
||||
import {getBasePath, normalizeFilePath} from '../../utils/path-utils'
|
||||
|
||||
import {
|
||||
TestExecutionResult,
|
||||
TestRunResult,
|
||||
TestSuiteResult,
|
||||
TestGroupResult,
|
||||
TestCaseResult,
|
||||
TestCaseError
|
||||
} from '../../test-results'
|
||||
|
||||
export class PhpunitJunitParser implements TestParser {
|
||||
readonly trackedFiles: Set<string>
|
||||
readonly trackedFilesList: string[]
|
||||
private assumedWorkDir: string | undefined
|
||||
|
||||
constructor(readonly options: ParseOptions) {
|
||||
this.trackedFilesList = options.trackedFiles.map(f => normalizeFilePath(f))
|
||||
this.trackedFiles = new Set(this.trackedFilesList)
|
||||
}
|
||||
|
||||
async parse(filePath: string, content: string): Promise<TestRunResult> {
|
||||
const reportOrSuite = await this.getPhpunitReport(filePath, content)
|
||||
const isReport = (reportOrSuite as PhpunitReport).testsuites !== undefined
|
||||
|
||||
// XML might contain:
|
||||
// - multiple suites under <testsuites> root node
|
||||
// - single <testsuite> as root node
|
||||
let report: PhpunitReport
|
||||
if (isReport) {
|
||||
report = reportOrSuite as PhpunitReport
|
||||
} else {
|
||||
// Make it behave the same way as if suite was inside <testsuites> root node
|
||||
const suite = (reportOrSuite as SingleSuiteReport).testsuite
|
||||
report = {
|
||||
testsuites: {
|
||||
$: {time: suite.$.time},
|
||||
testsuite: [suite]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return this.getTestRunResult(filePath, report)
|
||||
}
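// Illustrative sketch, not part of the original source: the two report shapes
// accepted by parse() above. With xml2js, a <testsuites> root parses to
// {testsuites: {...}}, while a bare <testsuite> root parses to
// {testsuite: {...}} and is wrapped so that both continue as
// report.testsuites.testsuite before getTestRunResult() runs:
//
//   <testsuites time="1.23"><testsuite name="A" time="1.0">...</testsuite></testsuites>
//   <testsuite name="A" time="1.0">...</testsuite>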
|
||||
|
||||
private async getPhpunitReport(filePath: string, content: string): Promise<PhpunitReport | SingleSuiteReport> {
|
||||
try {
|
||||
return await parseStringPromise(content)
|
||||
} catch (e) {
|
||||
throw new Error(`Invalid XML at ${filePath}\n\n${e}`)
|
||||
}
|
||||
}
|
||||
|
||||
private getTestRunResult(filePath: string, report: PhpunitReport): TestRunResult {
|
||||
const suites: TestSuiteResult[] = []
|
||||
this.collectSuites(suites, report.testsuites.testsuite ?? [])
|
||||
|
||||
const seconds = parseFloat(report.testsuites.$?.time ?? '')
|
||||
const time = isNaN(seconds) ? undefined : seconds * 1000
|
||||
return new TestRunResult(filePath, suites, time)
|
||||
}
|
||||
|
||||
private collectSuites(results: TestSuiteResult[], testsuites: TestSuite[]): void {
|
||||
for (const ts of testsuites) {
|
||||
// Recursively process nested test suites first (depth-first)
|
||||
if (ts.testsuite) {
|
||||
this.collectSuites(results, ts.testsuite)
|
||||
}
|
||||
|
||||
// Only add suites that have direct test cases
|
||||
// This avoids adding container suites that only hold nested suites
|
||||
if (ts.testcase && ts.testcase.length > 0) {
|
||||
const name = ts.$.name.trim()
|
||||
const time = parseFloat(ts.$.time) * 1000
|
||||
results.push(new TestSuiteResult(name, this.getGroups(ts), time))
|
||||
}
|
||||
}
|
||||
}
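// Illustrative sketch, not part of the original source: for a nested report
//
//   <testsuite name="All Tests">
//     <testsuite name="Unit"><testcase name="testFoo" time="0.01"/></testsuite>
//   </testsuite>
//
// collectSuites() above recurses into "Unit" first and adds it because it has
// direct <testcase> children, while the "All Tests" container suite is skipped
// since it only wraps nested suites.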
|
||||
|
||||
private getGroups(suite: TestSuite): TestGroupResult[] {
|
||||
if (!suite.testcase || suite.testcase.length === 0) {
|
||||
return []
|
||||
}
|
||||
|
||||
const groups: {name: string; tests: TestCase[]}[] = []
|
||||
for (const tc of suite.testcase) {
|
||||
// Use classname (PHPUnit style) for grouping
|
||||
// If classname matches suite name, use empty string to avoid redundancy
|
||||
const className = tc.$.classname ?? tc.$.class ?? ''
|
||||
const groupName = className === suite.$.name ? '' : className
|
||||
let grp = groups.find(g => g.name === groupName)
|
||||
if (grp === undefined) {
|
||||
grp = {name: groupName, tests: []}
|
||||
groups.push(grp)
|
||||
}
|
||||
grp.tests.push(tc)
|
||||
}
|
||||
|
||||
return groups.map(grp => {
|
||||
const tests = grp.tests.map(tc => {
|
||||
const name = tc.$.name.trim()
|
||||
const result = this.getTestCaseResult(tc)
|
||||
const time = parseFloat(tc.$.time) * 1000
|
||||
const error = this.getTestCaseError(tc)
|
||||
return new TestCaseResult(name, result, time, error)
|
||||
})
|
||||
return new TestGroupResult(grp.name, tests)
|
||||
})
|
||||
}
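// Illustrative sketch, not part of the original source: two test cases sharing
// classname "App\Tests\FooTest" end up in a single TestGroupResult of that
// name; when the classname equals the suite name, the group name collapses to
// '' so the suite heading is not repeated in the report.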
|
||||
|
||||
private getTestCaseResult(test: TestCase): TestExecutionResult {
|
||||
if (test.failure || test.error) return 'failed'
|
||||
if (test.skipped) return 'skipped'
|
||||
return 'success'
|
||||
}
|
||||
|
||||
private getTestCaseError(tc: TestCase): TestCaseError | undefined {
|
||||
if (!this.options.parseErrors) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
// We process <error> and <failure> the same way
|
||||
const failures = tc.failure ?? tc.error
|
||||
if (!failures || failures.length === 0) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
const failure = failures[0]
|
||||
const details = typeof failure === 'string' ? failure : failure._ ?? ''
|
||||
|
||||
// PHPUnit provides file path directly in testcase attributes
|
||||
let filePath: string | undefined
|
||||
let line: number | undefined
|
||||
|
||||
if (tc.$.file) {
|
||||
const relativePath = this.getRelativePath(tc.$.file)
|
||||
if (this.trackedFiles.has(relativePath)) {
|
||||
filePath = relativePath
|
||||
}
|
||||
if (tc.$.line) {
|
||||
line = parseInt(tc.$.line)
|
||||
}
|
||||
}
|
||||
|
||||
// If file not in tracked files, try to extract from error details
|
||||
if (!filePath && details) {
|
||||
const extracted = this.extractFileAndLine(details)
|
||||
if (extracted) {
|
||||
filePath = extracted.filePath
|
||||
line = extracted.line
|
||||
}
|
||||
}
|
||||
|
||||
let message: string | undefined
|
||||
if (typeof failure !== 'string' && failure.$) {
|
||||
message = failure.$.message
|
||||
if (failure.$.type) {
|
||||
message = message ? `${failure.$.type}: ${message}` : failure.$.type
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
path: filePath,
|
||||
line,
|
||||
details,
|
||||
message
|
||||
}
|
||||
}
|
||||
|
||||
private extractFileAndLine(details: string): {filePath: string; line: number} | undefined {
|
||||
// PHPUnit stack traces typically have format: /path/to/file.php:123
|
||||
const lines = details.split(/\r?\n/)
|
||||
|
||||
for (const str of lines) {
|
||||
// Match patterns like /path/to/file.php:123 or at /path/to/file.php(123)
|
||||
const matchColon = str.match(/((?:[A-Za-z]:)?[^\s:()]+?\.(?:php|phpt)):(\d+)/)
|
||||
if (matchColon) {
|
||||
const relativePath = this.getRelativePath(matchColon[1])
|
||||
if (this.trackedFiles.has(relativePath)) {
|
||||
return {filePath: relativePath, line: parseInt(matchColon[2])}
|
||||
}
|
||||
}
|
||||
|
||||
const matchParen = str.match(/((?:[A-Za-z]:)?[^\s:()]+?\.(?:php|phpt))\((\d+)\)/)
|
||||
if (matchParen) {
|
||||
const relativePath = this.getRelativePath(matchParen[1])
|
||||
if (this.trackedFiles.has(relativePath)) {
|
||||
return {filePath: relativePath, line: parseInt(matchParen[2])}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return undefined
|
||||
}
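// Illustrative sketch, not part of the original source: assuming the work dir
// resolves to '/home/runner/work/repo/' and 'src/FooTest.php' is tracked, both
// stack-trace styles are recognized by extractFileAndLine() above:
//
//   '/home/runner/work/repo/src/FooTest.php:42'   -> {filePath: 'src/FooTest.php', line: 42}
//   '/home/runner/work/repo/src/FooTest.php(42)'  -> {filePath: 'src/FooTest.php', line: 42}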
|
||||
|
||||
/**
|
||||
* Converts an absolute file path to a relative path by stripping the working directory prefix.
|
||||
*
|
||||
* @param path - The absolute file path from PHPUnit output (e.g., `/home/runner/work/repo/src/Test.php`)
|
||||
* @returns The relative path (e.g., `src/Test.php`) if a working directory can be determined,
|
||||
* otherwise returns the normalized original path
|
||||
*/
|
||||
private getRelativePath(path: string): string {
|
||||
path = normalizeFilePath(path)
|
||||
const workDir = this.getWorkDir(path)
|
||||
if (workDir !== undefined && path.startsWith(workDir)) {
|
||||
path = path.substring(workDir.length)
|
||||
}
|
||||
return path
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines the working directory prefix to strip from absolute file paths.
|
||||
*
|
||||
* The working directory is resolved using the following priority:
|
||||
*
|
||||
* 1. **Explicit configuration** - If `options.workDir` is set, it takes precedence.
|
||||
* This allows users to explicitly specify the working directory.
|
||||
*
|
||||
* 2. **Cached assumption** - If we've previously determined a working directory
|
||||
* (`assumedWorkDir`) and the current path starts with it, we reuse that value.
|
||||
* This avoids redundant computation for subsequent paths.
|
||||
*
|
||||
* 3. **Heuristic detection** - Uses `getBasePath()` to find the common prefix between
|
||||
* the absolute path and the list of tracked files in the repository. For example:
|
||||
* - Absolute path: `/home/runner/work/repo/src/Test.php`
|
||||
* - Tracked file: `src/Test.php`
|
||||
* - Detected workDir: `/home/runner/work/repo/`
|
||||
*
|
||||
* Once detected, the working directory is cached in `assumedWorkDir` for efficiency.
|
||||
*
|
||||
* @param path - The normalized absolute file path to analyze
|
||||
* @returns The working directory prefix (with trailing slash), or `undefined` if it cannot be determined
|
||||
*
|
||||
* @example
|
||||
* // With tracked file 'src/Foo.php' and path '/home/runner/work/repo/src/Foo.php'
|
||||
* // Returns: '/home/runner/work/repo/'
|
||||
*/
|
||||
private getWorkDir(path: string): string | undefined {
|
||||
if (this.options.workDir) {
|
||||
return this.options.workDir
|
||||
}
|
||||
|
||||
if (this.assumedWorkDir && path.startsWith(this.assumedWorkDir)) {
|
||||
return this.assumedWorkDir
|
||||
}
|
||||
|
||||
const basePath = getBasePath(path, this.trackedFilesList)
|
||||
if (basePath !== undefined) {
|
||||
this.assumedWorkDir = basePath
|
||||
}
|
||||
return basePath
|
||||
}
|
||||
}
|
||||
@@ -1,52 +0,0 @@
|
||||
export interface PhpunitReport {
|
||||
testsuites: TestSuites
|
||||
}
|
||||
|
||||
export interface SingleSuiteReport {
|
||||
testsuite: TestSuite
|
||||
}
|
||||
|
||||
export interface TestSuites {
|
||||
$?: {
|
||||
time?: string
|
||||
}
|
||||
testsuite?: TestSuite[]
|
||||
}
|
||||
|
||||
export interface TestSuite {
|
||||
$: {
|
||||
name: string
|
||||
tests?: string
|
||||
assertions?: string
|
||||
errors?: string
|
||||
failures?: string
|
||||
skipped?: string
|
||||
time: string
|
||||
file?: string
|
||||
}
|
||||
testcase?: TestCase[]
|
||||
testsuite?: TestSuite[]
|
||||
}
|
||||
|
||||
export interface TestCase {
|
||||
$: {
|
||||
name: string
|
||||
class?: string
|
||||
classname?: string
|
||||
file?: string
|
||||
line?: string
|
||||
assertions?: string
|
||||
time: string
|
||||
}
|
||||
failure?: Failure[]
|
||||
error?: Failure[]
|
||||
skipped?: string[]
|
||||
}
|
||||
|
||||
export interface Failure {
|
||||
_: string
|
||||
$?: {
|
||||
type?: string
|
||||
message?: string
|
||||
}
|
||||
}
|
||||
@@ -55,7 +55,7 @@ export class RspecJsonParser implements TestParser {
private processTest(suite: TestSuiteResult, test: RspecExample, result: TestExecutionResult): void {
const groupName =
test.full_description !== test.description
? test.full_description.substring(0, test.full_description.length - test.description.length).trimEnd()
? test.full_description.substr(0, test.full_description.length - test.description.length).trimEnd()
: null

let group = suite.groups.find(grp => grp.name === groupName)
@@ -1,260 +0,0 @@
|
||||
import * as path from 'path'
|
||||
import {ParseOptions, TestParser} from '../../test-parser'
|
||||
import {parseStringPromise} from 'xml2js'
|
||||
|
||||
import {NetteTesterReport, SingleSuiteReport, TestCase, TestSuite} from './tester-junit-types'
|
||||
import {normalizeFilePath} from '../../utils/path-utils'
|
||||
|
||||
import {
|
||||
TestExecutionResult,
|
||||
TestRunResult,
|
||||
TestSuiteResult,
|
||||
TestGroupResult,
|
||||
TestCaseResult,
|
||||
TestCaseError
|
||||
} from '../../test-results'
|
||||
|
||||
interface ParsedTestName {
|
||||
filePath: string
|
||||
method?: string
|
||||
description?: string
|
||||
className?: string
|
||||
displayName: string
|
||||
}
|
||||
|
||||
export class NetteTesterJunitParser implements TestParser {
|
||||
readonly trackedFiles: Set<string>
|
||||
readonly trackedFilesList: string[]
|
||||
|
||||
constructor(readonly options: ParseOptions) {
|
||||
this.trackedFilesList = options.trackedFiles.map(f => normalizeFilePath(f))
|
||||
this.trackedFiles = new Set(this.trackedFilesList)
|
||||
}
|
||||
|
||||
async parse(filePath: string, content: string): Promise<TestRunResult> {
|
||||
const reportOrSuite = await this.getNetteTesterReport(filePath, content)
|
||||
const isReport = (reportOrSuite as NetteTesterReport).testsuites !== undefined
|
||||
|
||||
// XML might contain:
|
||||
// - multiple suites under <testsuites> root node
|
||||
// - single <testsuite> as root node
|
||||
let report: NetteTesterReport
|
||||
if (isReport) {
|
||||
report = reportOrSuite as NetteTesterReport
|
||||
} else {
|
||||
// Make it behave the same way as if suite was inside <testsuites> root node
|
||||
const suite = (reportOrSuite as SingleSuiteReport).testsuite
|
||||
report = {
|
||||
testsuites: {
|
||||
$: {time: suite.$.time},
|
||||
testsuite: [suite]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return this.getTestRunResult(filePath, report)
|
||||
}
|
||||
|
||||
private async getNetteTesterReport(
|
||||
filePath: string,
|
||||
content: string
|
||||
): Promise<NetteTesterReport | SingleSuiteReport> {
|
||||
try {
|
||||
return await parseStringPromise(content)
|
||||
} catch (e) {
|
||||
throw new Error(`Invalid XML at ${filePath}\n\n${e}`)
|
||||
}
|
||||
}
|
||||
|
||||
private getTestRunResult(filePath: string, report: NetteTesterReport): TestRunResult {
|
||||
const suites =
|
||||
report.testsuites.testsuite === undefined
|
||||
? []
|
||||
: report.testsuites.testsuite.map((ts, index) => {
|
||||
// Use report file name as suite name (user preference)
|
||||
const fileName = path.basename(filePath)
|
||||
// If there are multiple test suites, add index to distinguish them
|
||||
const name =
|
||||
report.testsuites.testsuite && report.testsuites.testsuite.length > 1
|
||||
? `${fileName} #${index + 1}`
|
||||
: fileName
|
||||
const time = parseFloat(ts.$.time) * 1000
|
||||
const sr = new TestSuiteResult(name, this.getGroups(ts), time)
|
||||
return sr
|
||||
})
|
||||
|
||||
const seconds = parseFloat(report.testsuites.$?.time ?? '')
|
||||
const time = isNaN(seconds) ? undefined : seconds * 1000
|
||||
return new TestRunResult(filePath, suites, time)
|
||||
}
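// Illustrative sketch, not part of the original source: suite names come from
// the report file name, so a single-suite 'results.xml' yields one suite named
// 'results.xml', while a report with two <testsuite> elements yields
// 'results.xml #1' and 'results.xml #2'.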
|
||||
|
||||
private getGroups(suite: TestSuite): TestGroupResult[] {
|
||||
if (!suite.testcase || suite.testcase.length === 0) {
|
||||
return []
|
||||
}
|
||||
|
||||
// Group tests by directory structure
|
||||
const groups: Map<string, TestCase[]> = new Map()
|
||||
|
||||
for (const tc of suite.testcase) {
|
||||
const parsed = this.parseTestCaseName(tc.$.classname)
|
||||
const directory = path.dirname(parsed.filePath)
|
||||
|
||||
if (!groups.has(directory)) {
|
||||
groups.set(directory, [])
|
||||
}
|
||||
groups.get(directory)!.push(tc)
|
||||
}
|
||||
|
||||
return Array.from(groups.entries()).map(([dir, tests]) => {
|
||||
const testResults = tests.map(tc => {
|
||||
const parsed = this.parseTestCaseName(tc.$.classname)
|
||||
const result = this.getTestCaseResult(tc)
|
||||
const time = parseFloat(tc.$.time || '0') * 1000
|
||||
const error = this.getTestCaseError(tc, parsed.filePath)
|
||||
return new TestCaseResult(parsed.displayName, result, time, error)
|
||||
})
|
||||
return new TestGroupResult(dir, testResults)
|
||||
})
|
||||
}
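// Illustrative sketch, not part of the original source: test cases whose
// classnames resolve to 'tests/Framework/Assert.equal.phpt' and
// 'tests/Runner/Runner.basic.phpt' are split into two groups named
// 'tests/Framework' and 'tests/Runner', i.e. one TestGroupResult per directory
// of the parsed file path.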
|
||||
|
||||
/**
|
||||
* Parse test case name from classname attribute.
|
||||
*
|
||||
* Handles multiple patterns:
|
||||
* 1. Simple: "tests/Framework/Assert.equal.phpt"
|
||||
* 2. With method: "tests/Framework/Assert.equal.recursive.phpt [method=testSimple]"
|
||||
* 3. With description: "Prevent loop in error handling. The #268 regression. | tests/Framework/TestCase.ownErrorHandler.phpt"
|
||||
* 4. With class and method: "Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testRenderingBasics]"
|
||||
*/
|
||||
private parseTestCaseName(classname: string): ParsedTestName {
|
||||
let filePath = classname
|
||||
let method: string | undefined
|
||||
let description: string | undefined
|
||||
let className: string | undefined
|
||||
|
||||
// Pattern: "Description | filepath [method=methodName]"
|
||||
// or "ClassName | filepath [method=methodName]"
|
||||
const pipePattern = /^(.+?)\s*\|\s*(.+?)(?:\s*\[method=(.+?)\])?$/
|
||||
const pipeMatch = classname.match(pipePattern)
|
||||
|
||||
if (pipeMatch) {
|
||||
const prefix = pipeMatch[1].trim()
|
||||
filePath = pipeMatch[2].trim()
|
||||
method = pipeMatch[3]
|
||||
|
||||
// Check if prefix looks like a class name (contains backslash AND ends with dot)
|
||||
// Examples: "Kdyby\BootstrapFormRenderer\BootstrapRenderer."
|
||||
// vs description: "Prevent loop in error handling. The #268 regression."
|
||||
if (prefix.includes('\\') && prefix.endsWith('.')) {
|
||||
className = prefix
|
||||
} else {
|
||||
description = prefix
|
||||
}
|
||||
} else {
|
||||
// Pattern: "filepath [method=methodName]"
|
||||
const methodPattern = /^(.+?)\s*\[method=(.+?)\]$/
|
||||
const methodMatch = classname.match(methodPattern)
|
||||
|
||||
if (methodMatch) {
|
||||
filePath = methodMatch[1].trim()
|
||||
method = methodMatch[2].trim()
|
||||
}
|
||||
}
|
||||
|
||||
// Generate display name
|
||||
const baseName = path.basename(filePath)
|
||||
let displayName = baseName
|
||||
|
||||
if (method) {
|
||||
displayName = `${baseName}::${method}`
|
||||
}
|
||||
|
||||
if (description) {
|
||||
displayName = `${description} (${baseName})`
|
||||
} else if (className && method) {
|
||||
// For class names, keep them but still show the file
|
||||
displayName = `${baseName}::${method}`
|
||||
}
|
||||
|
||||
return {filePath, method, description, className, displayName}
|
||||
}
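// Illustrative sketch, not part of the original source, following the patterns
// documented above:
//
//   parseTestCaseName('tests/Framework/Assert.equal.phpt [method=testSimple]')
//     // filePath: 'tests/Framework/Assert.equal.phpt', method: 'testSimple',
//     // displayName: 'Assert.equal.phpt::testSimple'
//
//   parseTestCaseName('Prevent loop in error handling. | tests/Framework/TestCase.ownErrorHandler.phpt')
//     // description: 'Prevent loop in error handling.',
//     // displayName: 'Prevent loop in error handling. (TestCase.ownErrorHandler.phpt)'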
|
||||
|
||||
private getTestCaseResult(test: TestCase): TestExecutionResult {
|
||||
if (test.failure || test.error) return 'failed'
|
||||
if (test.skipped) return 'skipped'
|
||||
return 'success'
|
||||
}
|
||||
|
||||
private getTestCaseError(tc: TestCase, filePath: string): TestCaseError | undefined {
|
||||
if (!this.options.parseErrors) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
// We process <error> and <failure> the same way
|
||||
const failures = tc.failure ?? tc.error
|
||||
if (!failures || failures.length === 0) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
const failure = failures[0]
|
||||
// For Nette Tester, details are in the message attribute, not as inner text
|
||||
const details = typeof failure === 'string' ? failure : failure._ ?? failure.$?.message ?? ''
|
||||
|
||||
// Try to extract file path and line from error details
|
||||
let errorFilePath: string | undefined
|
||||
let line: number | undefined
|
||||
|
||||
if (details) {
|
||||
const extracted = this.extractFileAndLine(details)
|
||||
if (extracted) {
|
||||
errorFilePath = extracted.filePath
|
||||
line = extracted.line
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: use test file path if tracked
|
||||
if (!errorFilePath) {
|
||||
const normalized = normalizeFilePath(filePath)
|
||||
if (this.trackedFiles.has(normalized)) {
|
||||
errorFilePath = normalized
|
||||
}
|
||||
}
|
||||
|
||||
let message: string | undefined
|
||||
if (typeof failure !== 'string' && failure.$) {
|
||||
message = failure.$.message
|
||||
if (failure.$.type) {
|
||||
message = message ? `${failure.$.type}: ${message}` : failure.$.type
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
path: errorFilePath,
|
||||
line,
|
||||
details,
|
||||
message
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract file path and line number from error details.
|
||||
* Matches patterns like: /path/to/file.phpt:123 or /path/to/file.php:456
|
||||
*/
|
||||
private extractFileAndLine(details: string): {filePath: string; line: number} | undefined {
|
||||
const lines = details.split(/\r?\n/)
|
||||
|
||||
for (const str of lines) {
|
||||
// Match PHP file patterns: /path/to/file.phpt:123 or /path/to/file.php:456
|
||||
const match = str.match(/((?:[A-Za-z]:)?[^\s:()]+?\.(?:php|phpt)):(\d+)/)
|
||||
if (match) {
|
||||
const normalized = normalizeFilePath(match[1])
|
||||
if (this.trackedFiles.has(normalized)) {
|
||||
return {filePath: normalized, line: parseInt(match[2])}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
@@ -1,46 +0,0 @@
|
||||
export interface NetteTesterReport {
|
||||
testsuites: TestSuites
|
||||
}
|
||||
|
||||
export interface SingleSuiteReport {
|
||||
testsuite: TestSuite
|
||||
}
|
||||
|
||||
export interface TestSuites {
|
||||
$?: {
|
||||
time?: string
|
||||
}
|
||||
testsuite?: TestSuite[]
|
||||
}
|
||||
|
||||
export interface TestSuite {
|
||||
$: {
|
||||
// NOTE: name attribute is intentionally omitted - Nette Tester doesn't provide it
|
||||
tests: string
|
||||
errors: string
|
||||
failures?: string
|
||||
skipped: string
|
||||
time: string
|
||||
timestamp?: string
|
||||
}
|
||||
testcase?: TestCase[]
|
||||
}
|
||||
|
||||
export interface TestCase {
|
||||
$: {
|
||||
classname: string // File path, possibly with method or description prefix
|
||||
name: string // Usually same as classname
|
||||
time: string
|
||||
}
|
||||
failure?: Failure[]
|
||||
error?: Failure[]
|
||||
skipped?: string[]
|
||||
}
|
||||
|
||||
export interface Failure {
|
||||
_?: string
|
||||
$?: {
|
||||
type?: string
|
||||
message?: string
|
||||
}
|
||||
}
|
||||
@@ -36,7 +36,7 @@ export function ellipsis(text: string, maxLength: number): string {
return text
}

return text.substring(0, maxLength - 3) + '...'
return text.substr(0, maxLength - 3) + '...'
}
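// Illustrative sketch, not part of the original source: with maxLength = 10,
// ellipsis('abcdefghijklmn', 10) keeps the first maxLength - 3 characters and
// appends '...', producing 'abcdefg...'; strings of length <= maxLength are
// returned unchanged.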
export function formatTime(ms: number): string {

@@ -34,6 +34,6 @@ export function getBasePath(path: string, trackedFiles: string[]): string | unde
return undefined
}

const base = path.substring(0, path.length - max.length)
const base = path.substr(0, path.length - max.length)
return base
}
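// Illustrative sketch, not part of the original source: assuming the tracked
// file 'src/Foo.php' and a reported absolute path of
// '/home/runner/work/repo/src/Foo.php', the longest matching suffix is
// 'src/Foo.php', so getBasePath() returns '/home/runner/work/repo/', which the
// parsers then cache as the assumed working directory.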