Compare commits

..

3 Commits

Author SHA1 Message Date
Jozef Izso
12c7abe9ab Add conclusion output column to integration test summary table
- Added job outputs to expose conclusion from each test scenario
- Added new "Conclusion" column to summary table with colored badges
- Shows the actual conclusion output (🟢 success / 🔴 failure / ⚫ N/A)

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-12-14 15:12:30 +01:00
Jozef Izso
3b5ad0231b Update scenario 4 to be a regression test for issue #217
The bug has been fixed - conclusion output now correctly reflects
test failures independent of fail-on-error setting. Updated comments
and summary to indicate this is now a regression test.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-12-14 15:12:15 +01:00
Jozef Izso
c89704a410 Add integration tests for fail-on-error and fail-on-empty scenarios (#217)
Add workflow and fixtures to test the behavior of fail-on-error and
fail-on-empty parameters across different scenarios:

- Passing tests with fail-on-error true/false
- Failing tests with fail-on-error true/false
- Empty results with fail-on-empty true/false

Scenario 4 (failing tests + fail-on-error=false) is expected to fail
until issue #217 is fixed, documenting the bug where check conclusion
shows 'success' even when tests fail.

The workflow outputs a GitHub Actions summary with a markdown table
showing all test results.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-12-14 15:11:36 +01:00
10 changed files with 347 additions and 407 deletions
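For context on how the "Conclusion" column from commit 12c7abe9ab is wired: each scenario job exposes the reporter's `conclusion` step output as a job output, and the summary job reads it through the `needs` context, as the workflow below shows in full. A minimal sketch of that wiring, with illustrative job and step names and an assumed published action reference (the workflow in this diff uses `uses: ./` and its own scenario job names):

jobs:
  scenario:
    runs-on: ubuntu-latest              # assumed runner label
    outputs:
      conclusion: ${{ steps.report.outputs.conclusion }}
    steps:
      - uses: actions/checkout@v6
      - id: report
        uses: dorny/test-reporter@v1    # assumed reference; this repo's workflow runs itself via `uses: ./`
        with:
          name: Sketch
          path: 'reports/*.xml'         # illustrative path
          reporter: java-junit
  summary:
    needs: scenario
    if: always()
    runs-on: ubuntu-latest
    steps:
      - run: echo "Conclusion: ${{ needs.scenario.outputs.conclusion || 'N/A' }}"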

View File

@@ -0,0 +1,320 @@
name: Integration Tests (#217) - fail-on-error/fail-on-empty
on:
  workflow_dispatch:
  push:
  pull_request:
    paths:
      - 'src/**'
      - 'dist/**'
      - 'action.yml'
      - '.github/workflows/integration-tests.yml'
      - '__tests__/fixtures/integration/**'
jobs:
  # ============================================
  # Scenario 1: Passing tests, fail-on-error=true
  # Expected: Step passes, conclusion=success
  # ============================================
  test-passing-fail-on-error-true:
    name: "Passing tests | fail-on-error=true"
    runs-on: ubuntu-slim
    outputs:
      conclusion: ${{ steps.report.outputs.conclusion }}
    steps:
      - uses: actions/checkout@v6
      - name: Run test reporter
        id: report
        uses: ./
        with:
          name: 'Integration Test - Passing (fail-on-error=true)'
          path: '__tests__/fixtures/integration/passing-tests.xml'
          reporter: java-junit
          fail-on-error: 'true'
          fail-on-empty: 'true'
      - name: Validate results
        run: |
          echo "=== Test Results ==="
          echo "Step outcome: success (would have failed otherwise)"
          echo "Conclusion: ${{ steps.report.outputs.conclusion }}"
          echo "Passed: ${{ steps.report.outputs.passed }}"
          echo "Failed: ${{ steps.report.outputs.failed }}"
          if [ "${{ steps.report.outputs.conclusion }}" != "success" ]; then
            echo "FAIL: Expected conclusion 'success' but got '${{ steps.report.outputs.conclusion }}'"
            exit 1
          fi
          echo "PASS: All validations passed"
  # ============================================
  # Scenario 2: Passing tests, fail-on-error=false
  # Expected: Step passes, conclusion=success
  # ============================================
  test-passing-fail-on-error-false:
    name: "Passing tests | fail-on-error=false"
    runs-on: ubuntu-slim
    outputs:
      conclusion: ${{ steps.report.outputs.conclusion }}
    steps:
      - uses: actions/checkout@v6
      - name: Run test reporter
        id: report
        uses: ./
        with:
          name: 'Integration Test - Passing (fail-on-error=false)'
          path: '__tests__/fixtures/integration/passing-tests.xml'
          reporter: java-junit
          fail-on-error: 'false'
          fail-on-empty: 'true'
      - name: Validate results
        run: |
          echo "=== Test Results ==="
          echo "Conclusion: ${{ steps.report.outputs.conclusion }}"
          if [ "${{ steps.report.outputs.conclusion }}" != "success" ]; then
            echo "FAIL: Expected conclusion 'success' but got '${{ steps.report.outputs.conclusion }}'"
            exit 1
          fi
          echo "PASS: All validations passed"
  # ============================================
  # Scenario 3: Failing tests, fail-on-error=true
  # Expected: Step FAILS, conclusion=failure
  # ============================================
  test-failing-fail-on-error-true:
    name: "Failing tests | fail-on-error=true"
    runs-on: ubuntu-slim
    outputs:
      conclusion: ${{ steps.report.outputs.conclusion }}
    steps:
      - uses: actions/checkout@v6
      - name: Run test reporter
        id: report
        continue-on-error: true
        uses: ./
        with:
          name: 'Integration Test - Failing (fail-on-error=true)'
          path: '__tests__/fixtures/integration/failing-tests.xml'
          reporter: java-junit
          fail-on-error: 'true'
          fail-on-empty: 'true'
      - name: Validate results
        run: |
          echo "=== Test Results ==="
          echo "Step outcome: ${{ steps.report.outcome }}"
          echo "Conclusion: ${{ steps.report.outputs.conclusion }}"
          echo "Failed count: ${{ steps.report.outputs.failed }}"
          # Step should fail
          if [ "${{ steps.report.outcome }}" != "failure" ]; then
            echo "FAIL: Expected step to fail but got '${{ steps.report.outcome }}'"
            exit 1
          fi
          # Conclusion should be failure
          if [ "${{ steps.report.outputs.conclusion }}" != "failure" ]; then
            echo "FAIL: Expected conclusion 'failure' but got '${{ steps.report.outputs.conclusion }}'"
            exit 1
          fi
          echo "PASS: All validations passed"
  # ============================================
  # Scenario 4: Failing tests, fail-on-error=false
  # Expected: Step passes, conclusion=failure
  # Regression test for issue #217
  # ============================================
  test-failing-fail-on-error-false:
    name: "Failing tests | fail-on-error=false [#217]"
    runs-on: ubuntu-slim
    outputs:
      conclusion: ${{ steps.report.outputs.conclusion }}
    steps:
      - uses: actions/checkout@v6
      - name: Run test reporter
        id: report
        continue-on-error: true
        uses: ./
        with:
          name: 'Integration Test - Failing (fail-on-error=false)'
          path: '__tests__/fixtures/integration/failing-tests.xml'
          reporter: java-junit
          fail-on-error: 'false'
          fail-on-empty: 'true'
      - name: Validate results
        run: |
          echo "=== Test Results ==="
          echo "Step outcome: ${{ steps.report.outcome }}"
          echo "Conclusion: ${{ steps.report.outputs.conclusion }}"
          echo "Failed count: ${{ steps.report.outputs.failed }}"
          # Step should pass (fail-on-error is false)
          if [ "${{ steps.report.outcome }}" != "success" ]; then
            echo "FAIL: Expected step to pass but got '${{ steps.report.outcome }}'"
            exit 1
          fi
          # Conclusion SHOULD be 'failure' because tests failed
          # Regression test for issue #217
          if [ "${{ steps.report.outputs.conclusion }}" != "failure" ]; then
            echo "========================================"
            echo "REGRESSION DETECTED (Issue #217)"
            echo "========================================"
            echo "Expected conclusion 'failure' but got '${{ steps.report.outputs.conclusion }}'"
            echo "The check conclusion should reflect test results,"
            echo "independent of the fail-on-error setting."
            echo "========================================"
            exit 1
          fi
          echo "PASS: All validations passed"
  # ============================================
  # Scenario 5: Empty results, fail-on-empty=true
  # Expected: Step FAILS
  # ============================================
  test-empty-fail-on-empty-true:
    name: "Empty results | fail-on-empty=true"
    runs-on: ubuntu-slim
    outputs:
      conclusion: ${{ steps.report.outputs.conclusion || 'N/A' }}
    steps:
      - uses: actions/checkout@v6
      - name: Run test reporter
        id: report
        continue-on-error: true
        uses: ./
        with:
          name: 'Integration Test - Empty (fail-on-empty=true)'
          path: '__tests__/fixtures/integration/nonexistent-*.xml'
          reporter: java-junit
          fail-on-error: 'true'
          fail-on-empty: 'true'
      - name: Validate results
        run: |
          echo "=== Test Results ==="
          echo "Step outcome: ${{ steps.report.outcome }}"
          # Step should fail (no files found)
          if [ "${{ steps.report.outcome }}" != "failure" ]; then
            echo "FAIL: Expected step to fail but got '${{ steps.report.outcome }}'"
            exit 1
          fi
          echo "PASS: Step correctly failed on empty results"
  # ============================================
  # Scenario 6: Empty results, fail-on-empty=false
  # Expected: Step passes, conclusion=success
  # ============================================
  test-empty-fail-on-empty-false:
    name: "Empty results | fail-on-empty=false"
    runs-on: ubuntu-slim
    outputs:
      conclusion: ${{ steps.report.outputs.conclusion || 'N/A' }}
    steps:
      - uses: actions/checkout@v6
      - name: Run test reporter
        id: report
        continue-on-error: true
        uses: ./
        with:
          name: 'Integration Test - Empty (fail-on-empty=false)'
          path: '__tests__/fixtures/integration/nonexistent-*.xml'
          reporter: java-junit
          fail-on-error: 'true'
          fail-on-empty: 'false'
      - name: Validate results
        run: |
          echo "=== Test Results ==="
          echo "Step outcome: ${{ steps.report.outcome }}"
          # Step should pass (fail-on-empty is false)
          if [ "${{ steps.report.outcome }}" != "success" ]; then
            echo "FAIL: Expected step to pass but got '${{ steps.report.outcome }}'"
            exit 1
          fi
          echo "PASS: Step correctly passed with empty results"
  # ============================================
  # Summary job to report overall status
  # ============================================
  summary:
    name: "Test Summary"
    needs:
      - test-passing-fail-on-error-true
      - test-passing-fail-on-error-false
      - test-failing-fail-on-error-true
      - test-failing-fail-on-error-false
      - test-empty-fail-on-empty-true
      - test-empty-fail-on-empty-false
    runs-on: ubuntu-slim
    if: always()
    steps:
      - name: Generate summary
        run: |
          # Helper function to convert result to emoji
          result_to_emoji() {
            case "$1" in
              success) echo "✅ Pass" ;;
              failure) echo "❌ Fail" ;;
              cancelled) echo "⚪ Cancelled" ;;
              skipped) echo "⏭️ Skipped" ;;
              *) echo "❓ Unknown" ;;
            esac
          }
          # Helper function to format conclusion
          conclusion_to_badge() {
            case "$1" in
              success) echo "🟢 success" ;;
              failure) echo "🔴 failure" ;;
              N/A) echo "⚫ N/A" ;;
              *) echo "⚪ $1" ;;
            esac
          }
          # Generate markdown summary
          cat >> $GITHUB_STEP_SUMMARY << 'EOF'
          # Integration Test Results
          ## fail-on-error / fail-on-empty Scenarios
          | Scenario | Test Results | fail-on-error | fail-on-empty | Expected | Conclusion | Result |
          |----------|--------------|---------------|---------------|----------|------------|--------|
          EOF
          echo "| 1 | All pass | \`true\` | \`true\` | Step: pass, Check: success | $(conclusion_to_badge "${{ needs.test-passing-fail-on-error-true.outputs.conclusion }}") | $(result_to_emoji "${{ needs.test-passing-fail-on-error-true.result }}") |" >> $GITHUB_STEP_SUMMARY
          echo "| 2 | All pass | \`false\` | \`true\` | Step: pass, Check: success | $(conclusion_to_badge "${{ needs.test-passing-fail-on-error-false.outputs.conclusion }}") | $(result_to_emoji "${{ needs.test-passing-fail-on-error-false.result }}") |" >> $GITHUB_STEP_SUMMARY
          echo "| 3 | Some fail | \`true\` | \`true\` | Step: fail, Check: failure | $(conclusion_to_badge "${{ needs.test-failing-fail-on-error-true.outputs.conclusion }}") | $(result_to_emoji "${{ needs.test-failing-fail-on-error-true.result }}") |" >> $GITHUB_STEP_SUMMARY
          echo "| 4 | Some fail | \`false\` | \`true\` | Step: pass, Check: failure | $(conclusion_to_badge "${{ needs.test-failing-fail-on-error-false.outputs.conclusion }}") | $(result_to_emoji "${{ needs.test-failing-fail-on-error-false.result }}") |" >> $GITHUB_STEP_SUMMARY
          echo "| 5 | Empty | \`true\` | \`true\` | Step: fail | $(conclusion_to_badge "${{ needs.test-empty-fail-on-empty-true.outputs.conclusion }}") | $(result_to_emoji "${{ needs.test-empty-fail-on-empty-true.result }}") |" >> $GITHUB_STEP_SUMMARY
          echo "| 6 | Empty | \`true\` | \`false\` | Step: pass | $(conclusion_to_badge "${{ needs.test-empty-fail-on-empty-false.outputs.conclusion }}") | $(result_to_emoji "${{ needs.test-empty-fail-on-empty-false.result }}") |" >> $GITHUB_STEP_SUMMARY
          cat >> $GITHUB_STEP_SUMMARY << 'EOF'
          ---
          > **Scenario 4** is a regression test for [issue #217](https://github.com/dorny/test-reporter/issues/217).
          > It verifies that the `conclusion` output correctly reflects test failures, independent of the `fail-on-error` setting.
          > When `fail-on-error=false`, the step should pass but `conclusion` should still be `failure` if tests failed.
          EOF
          # Also print to console
          echo "=== Integration Test Summary ==="
          echo "Scenario 1 (pass, fail-on-error=true): ${{ needs.test-passing-fail-on-error-true.result }}"
          echo "Scenario 2 (pass, fail-on-error=false): ${{ needs.test-passing-fail-on-error-false.result }}"
          echo "Scenario 3 (fail, fail-on-error=true): ${{ needs.test-failing-fail-on-error-true.result }}"
          echo "Scenario 4 (fail, fail-on-error=false): ${{ needs.test-failing-fail-on-error-false.result }} (regression test for #217)"
          echo "Scenario 5 (empty, fail-on-empty=true): ${{ needs.test-empty-fail-on-empty-true.result }}"
          echo "Scenario 6 (empty, fail-on-empty=false): ${{ needs.test-empty-fail-on-empty-false.result }}"

View File

@@ -1,23 +0,0 @@
![Tests failed](https://img.shields.io/badge/tests-5%20passed%2C%201%20failed-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/external/java/junit4-basic.xml](#user-content-r0)|5 ✅|1 ❌||16s|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/java/junit4-basic.xml</a>
**6** tests were completed in **16s** with **5** passed, **1** failed and **0** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[Tests.Authentication](#user-content-r0s0)|2 ✅|1 ❌||9s|
|[Tests.Registration](#user-content-r0s1)|3 ✅|||7s|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">Tests.Authentication</a>
```
✅ testCase7
✅ testCase8
❌ testCase9
AssertionError: Assertion error message
```
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">Tests.Registration</a>
```
✅ testCase1
✅ testCase2
✅ testCase3
```

View File

@@ -1,22 +0,0 @@
![Tests failed](https://img.shields.io/badge/tests-5%20passed%2C%202%20failed%2C%201%20skipped-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/external/java/junit4-complete.xml](#user-content-r0)|5 ✅|2 ❌|1 ⚪|16s|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/java/junit4-complete.xml</a>
**8** tests were completed in **16s** with **5** passed, **2** failed and **1** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[Tests.Registration](#user-content-r0s0)|5 ✅|2 ❌|1 ⚪|16s|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">Tests.Registration</a>
```
✅ testCase1
✅ testCase2
✅ testCase3
⚪ testCase4
❌ testCase5
AssertionError: Expected value did not match.
❌ testCase6
ArithmeticError: Division by zero.
✅ testCase7
✅ testCase8
```

View File

@@ -6878,153 +6878,3 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 2126531.0000000005,
}
`;
exports[`java-junit tests report from testmo/junitxml basic example matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/external/java/junit4-basic.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testCase1",
"result": "success",
"time": 2113.871,
},
TestCaseResult {
"error": undefined,
"name": "testCase2",
"result": "success",
"time": 1051,
},
TestCaseResult {
"error": undefined,
"name": "testCase3",
"result": "success",
"time": 3441,
},
],
},
],
"name": "Tests.Registration",
"totalTime": 6605.870999999999,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testCase7",
"result": "success",
"time": 2508,
},
TestCaseResult {
"error": undefined,
"name": "testCase8",
"result": "success",
"time": 1230.8159999999998,
},
TestCaseResult {
"error": {
"details": undefined,
"line": undefined,
"message": "AssertionError: Assertion error message",
"path": undefined,
},
"name": "testCase9",
"result": "failed",
"time": 982,
},
],
},
],
"name": "Tests.Authentication",
"totalTime": 9076.816,
},
],
"totalTime": 15682.687,
}
`;
exports[`java-junit tests report from testmo/junitxml complete example matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/external/java/junit4-complete.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testCase1",
"result": "success",
"time": 2436,
},
TestCaseResult {
"error": undefined,
"name": "testCase2",
"result": "success",
"time": 1534,
},
TestCaseResult {
"error": undefined,
"name": "testCase3",
"result": "success",
"time": 822,
},
TestCaseResult {
"error": undefined,
"name": "testCase4",
"result": "skipped",
"time": 0,
},
TestCaseResult {
"error": {
"details": undefined,
"line": undefined,
"message": "AssertionError: Expected value did not match.",
"path": undefined,
},
"name": "testCase5",
"result": "failed",
"time": 2902.412,
},
TestCaseResult {
"error": {
"details": undefined,
"line": undefined,
"message": "ArithmeticError: Division by zero.",
"path": undefined,
},
"name": "testCase6",
"result": "failed",
"time": 3819,
},
TestCaseResult {
"error": undefined,
"name": "testCase7",
"result": "success",
"time": 2944,
},
TestCaseResult {
"error": undefined,
"name": "testCase8",
"result": "success",
"time": 1625.275,
},
],
},
],
"name": "Tests.Registration",
"totalTime": 16082.687,
},
],
"totalTime": 16082.687,
}
`;

View File

@@ -1,31 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
  This is a basic JUnit-style XML example to highlight the basic structure.
  Example by Testmo. Copyright 2023 Testmo GmbH. All rights reserved.
  Testmo test management software - https://www.testmo.com/
-->
<testsuites time="15.682687">
  <testsuite name="Tests.Registration" time="6.605871">
    <testcase name="testCase1" classname="Tests.Registration" time="2.113871" />
    <testcase name="testCase2" classname="Tests.Registration" time="1.051" />
    <testcase name="testCase3" classname="Tests.Registration" time="3.441" />
  </testsuite>
  <testsuite name="Tests.Authentication" time="9.076816">
    <!-- Java JUnit 4 XML files do not nest <testsuite> elements -->
    <!--
    <testsuite name="Tests.Authentication.Login" time="4.356">
      <testcase name="testCase4" classname="Tests.Authentication.Login" time="2.244" />
      <testcase name="testCase5" classname="Tests.Authentication.Login" time="0.781" />
      <testcase name="testCase6" classname="Tests.Authentication.Login" time="1.331" />
    </testsuite>
    -->
    <testcase name="testCase7" classname="Tests.Authentication" time="2.508" />
    <testcase name="testCase8" classname="Tests.Authentication" time="1.230816" />
    <testcase name="testCase9" classname="Tests.Authentication" time="0.982">
      <failure message="Assertion error message" type="AssertionError">
        <!-- Call stack printed here -->
      </failure>
    </testcase>
  </testsuite>
</testsuites>

View File

@@ -1,141 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
This is a JUnit-style XML example with commonly used tags and attributes.
Example by Testmo. Copyright 2023 Testmo GmbH. All rights reserved.
Testmo test management software - https://www.testmo.com/
-->
<!-- <testsuites> Usually the root element of a JUnit XML file. Some tools leave out
the <testsuites> element if there is only a single top-level <testsuite> element (which
is then used as the root element).
name Name of the entire test run
tests Total number of tests in this file
failures Total number of failed tests in this file
errors Total number of errored tests in this file
skipped Total number of skipped tests in this file
assertions Total number of assertions for all tests in this file
time Aggregated time of all tests in this file in seconds
timestamp Date and time of when the test run was executed (in ISO 8601 format)
-->
<testsuites name="Test run" tests="8" failures="1" errors="1" skipped="1"
assertions="20" time="16.082687" timestamp="2021-04-02T15:48:23">
<!-- <testsuite> A test suite usually represents a class, folder or group of tests.
There can be many test suites in an XML file, and there can be test suites under other
test suites.
name Name of the test suite (e.g. class name or folder name)
tests Total number of tests in this suite
failures Total number of failed tests in this suite
errors Total number of errored tests in this suite
skipped Total number of skipped tests in this suite
assertions Total number of assertions for all tests in this suite
time Aggregated time of all tests in this file in seconds
timestamp Date and time of when the test suite was executed (in ISO 8601 format)
file Source code file of this test suite
-->
<testsuite name="Tests.Registration" tests="8" failures="1" errors="1" skipped="1"
assertions="20" time="16.082687" timestamp="2021-04-02T15:48:23"
file="tests/registration.code">
<!-- <properties> Test suites (and test cases, see below) can have additional
properties such as environment variables or version numbers. -->
<properties>
<!-- <property> Each property has a name and value. Some tools also support
properties with text values instead of value attributes. -->
<property name="version" value="1.774" />
<property name="commit" value="ef7bebf" />
<property name="browser" value="Google Chrome" />
<property name="ci" value="https://github.com/actions/runs/1234" />
<property name="config">
Config line #1
Config line #2
Config line #3
</property>
</properties>
<!-- <system-out> Optionally data written to standard out for the suite.
Also supported on a test case level, see below. -->
<system-out>Data written to standard out.</system-out>
<!-- <system-err> Optionally data written to standard error for the suite.
Also supported on a test case level, see below. -->
<system-err>Data written to standard error.</system-err>
<!-- <testcase> There are one or more test cases in a test suite. A test passed
if there isn't an additional result element (skipped, failure, error).
name The name of this test case, often the method name
classname The name of the parent class/folder, often the same as the suite's name
assertions Number of assertions checked during test case execution
time Execution time of the test in seconds
file Source code file of this test case
line Source code line number of the start of this test case
-->
<testcase name="testCase1" classname="Tests.Registration" assertions="2"
time="2.436" file="tests/registration.code" line="24" />
<testcase name="testCase2" classname="Tests.Registration" assertions="6"
time="1.534" file="tests/registration.code" line="62" />
<testcase name="testCase3" classname="Tests.Registration" assertions="3"
time="0.822" file="tests/registration.code" line="102" />
<!-- Example of a test case that was skipped -->
<testcase name="testCase4" classname="Tests.Registration" assertions="0"
time="0" file="tests/registration.code" line="164">
<!-- <skipped> Indicates that the test was not executed. Can have an optional
message describing why the test was skipped. -->
<skipped message="Test was skipped." />
</testcase>
<!-- Example of a test case that failed. -->
<testcase name="testCase5" classname="Tests.Registration" assertions="2"
time="2.902412" file="tests/registration.code" line="202">
<!-- <failure> The test failed because one of the assertions/checks failed.
Can have a message and failure type, often the assertion type or class. The text
content of the element often includes the failure description or stack trace. -->
<failure message="Expected value did not match." type="AssertionError">
<!-- Failure description or stack trace -->
</failure>
</testcase>
<!-- Example of a test case that had errors. -->
<testcase name="testCase6" classname="Tests.Registration" assertions="0"
time="3.819" file="tests/registration.code" line="235">
<!-- <error> The test had an unexpected error during execution. Can have a
message and error type, often the exception type or class. The text
content of the element often includes the error description or stack trace. -->
<error message="Division by zero." type="ArithmeticError">
<!-- Error description or stack trace -->
</error>
</testcase>
<!-- Example of a test case with outputs. -->
<testcase name="testCase7" classname="Tests.Registration" assertions="3"
time="2.944" file="tests/registration.code" line="287">
<!-- <system-out> Optional data written to standard out for the test case. -->
<system-out>Data written to standard out.</system-out>
<!-- <system-err> Optional data written to standard error for the test case. -->
<system-err>Data written to standard error.</system-err>
</testcase>
<!-- Example of a test case with properties -->
<testcase name="testCase8" classname="Tests.Registration" assertions="4"
time="1.625275" file="tests/registration.code" line="302">
<!-- <properties> Some tools also support properties for test cases. -->
<properties>
<property name="priority" value="high" />
<property name="language" value="english" />
<property name="author" value="Adrian" />
<property name="attachment" value="screenshots/dashboard.png" />
<property name="attachment" value="screenshots/users.png" />
<property name="description">
This text describes the purpose of this test case and provides
an overview of what the test does and how it works.
</property>
</properties>
</testcase>
</testsuite>
</testsuites>

View File

@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuites name="EmptySuite" tests="0" failures="0" errors="0" time="0">
  <testsuite name="EmptySuite" tests="0" failures="0" errors="0" time="0">
  </testsuite>
</testsuites>

View File

@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuites name="FailingSuite" tests="3" failures="1" errors="0" time="0.5">
  <testsuite name="FailingSuite" tests="3" failures="1" errors="0" time="0.5">
    <testcase name="should pass test 1" classname="FailingSuite" time="0.1"/>
    <testcase name="should fail test 2" classname="FailingSuite" time="0.2">
      <failure message="Assertion failed" type="AssertionError">
        Expected: true
        Received: false
        at Object.test (/test/example.test.js:10:5)
      </failure>
    </testcase>
    <testcase name="should pass test 3" classname="FailingSuite" time="0.2"/>
  </testsuite>
</testsuites>

View File

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuites name="PassingSuite" tests="3" failures="0" errors="0" time="0.5">
  <testsuite name="PassingSuite" tests="3" failures="0" errors="0" time="0.5">
    <testcase name="should pass test 1" classname="PassingSuite" time="0.1"/>
    <testcase name="should pass test 2" classname="PassingSuite" time="0.2"/>
    <testcase name="should pass test 3" classname="PassingSuite" time="0.2"/>
  </testsuite>
</testsuites>

View File

@@ -73,46 +73,6 @@ describe('java-junit tests', () => {
    fs.writeFileSync(outputPath, report)
  })
  it('report from testmo/junitxml basic example matches snapshot', async () => {
    const fixturePath = path.join(__dirname, 'fixtures', 'external', 'java', 'junit4-basic.xml')
    const outputPath = path.join(__dirname, '__outputs__', 'junit-basic.md')
    const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
    const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
    const opts: ParseOptions = {
      parseErrors: true,
      trackedFiles: []
    }
    const parser = new JavaJunitParser(opts)
    const result = await parser.parse(filePath, fileContent)
    expect(result).toMatchSnapshot()
    const report = getReport([result])
    fs.mkdirSync(path.dirname(outputPath), {recursive: true})
    fs.writeFileSync(outputPath, report)
  })
  it('report from testmo/junitxml complete example matches snapshot', async () => {
    const fixturePath = path.join(__dirname, 'fixtures', 'external', 'java', 'junit4-complete.xml')
    const outputPath = path.join(__dirname, '__outputs__', 'junit-complete.md')
    const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
    const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
    const opts: ParseOptions = {
      parseErrors: true,
      trackedFiles: []
    }
    const parser = new JavaJunitParser(opts)
    const result = await parser.parse(filePath, fileContent)
    expect(result).toMatchSnapshot()
    const report = getReport([result])
    fs.mkdirSync(path.dirname(outputPath), {recursive: true})
    fs.writeFileSync(outputPath, report)
  })
  it('parses empty failures in test results', async () => {
    const fixturePath = path.join(__dirname, 'fixtures', 'external', 'java', 'empty_failures.xml')
    const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))