Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account.

Change JUnit terraform test output to include test failure details inside <failure> elements, use the error message as the message attribute #36316

Merged
merged 9 commits into from
Jan 22, 2025
113 changes: 78 additions & 35 deletions internal/command/junit/junit.go
Original file line number Diff line number Diff line change
Expand Up @@ -193,14 +193,6 @@ func junitXMLTestReport(suite *moduletest.Suite, suiteRunnerStopped bool, source
for i, run := range file.Runs {
// Each run is a "test case".

// By creating a map of diags we can delete them as they're used below
// This helps to identify diags that are only appropriate to include in
// the "system-err" element
diagsMap := make(map[int]tfdiags.Diagnostic, len(run.Diagnostics))
for i, diag := range run.Diagnostics {
diagsMap[i] = diag
}

testCase := testCase{
Name: run.Name,

Expand All @@ -223,46 +215,64 @@ func junitXMLTestReport(suite *moduletest.Suite, suiteRunnerStopped bool, source
Body: body,
}
case moduletest.Fail:
var failedAssertion tfdiags.Diagnostic
for key, d := range diagsMap {
// Find the diag resulting from a failed assertion
if d.Description().Summary == failedTestSummary {
failedAssertion = d
delete(diagsMap, key)
break
// When the test fails we only use error diags that originate from failing assertions
var failedAssertions tfdiags.Diagnostics
for _, d := range run.Diagnostics {
if d.Severity() == tfdiags.Error && d.Description().Summary == failedTestSummary {
failedAssertions = failedAssertions.Append(d)
}
}

testCase.Failure = &withMessage{
Message: failedAssertion.Description().Detail,
Body: format.DiagnosticPlain(failedAssertion, sources, 80),
Message: failureMessage(failedAssertions, len(run.Config.CheckRules)),
Body: getDiagString(failedAssertions, sources),
}

case moduletest.Error:
var diagsStr strings.Builder
for key, d := range diagsMap {
diagsStr.WriteString(format.DiagnosticPlain(d, sources, 80))
delete(diagsMap, key)
// When the test errors we use all diags with Error severity
var errDiags tfdiags.Diagnostics
for _, d := range run.Diagnostics {
if d.Severity() == tfdiags.Error {
errDiags = errDiags.Append(d)
}
}

testCase.Error = &withMessage{
Message: "Encountered an error",
Body: diagsStr.String(),
Body: getDiagString(errDiags, sources),
}
}
if len(diagsMap) != 0 && testCase.Error == nil {
// If we have diagnostics but the outcome wasn't an error
// then we're presumably holding diagnostics that didn't
// cause the test to error, such as warnings. We'll place
// those into the "system-err" element instead, so that
// they'll be reported _somewhere_ at least.
var diagsStr strings.Builder
for key, diag := range diagsMap {
diagsStr.WriteString(format.DiagnosticPlain(diag, sources, 80))
delete(diagsMap, key)

// Determine if there are diagnostics left unused by the switch block above
// that should be included in the "system-err" element
if len(run.Diagnostics) > 0 {
var systemErrDiags tfdiags.Diagnostics

if run.Status == moduletest.Error && run.Diagnostics.HasWarnings() {
// If the test case errored, then all Error diags are in the "error" element
// Therefore we'd only need to include warnings in "system-err"
systemErrDiags = getWarnings(run.Diagnostics)
}
testCase.Stderr = &withMessage{
Body: diagsStr.String(),

if run.Status != moduletest.Error {
// If a test hasn't errored then we need to find all diagnostics that aren't due
// to a failing assertion in a test (these are already displayed in the "failure" element)

// Collect diags not due to failed assertions, both errors and warnings
for _, d := range run.Diagnostics {
if d.Description().Summary != failedTestSummary {
systemErrDiags = systemErrDiags.Append(d)
}
}
}

if len(systemErrDiags) > 0 {
testCase.Stderr = &withMessage{
Body: getDiagString(systemErrDiags, sources),
}
}
}

enc.EncodeElement(&testCase, xml.StartElement{
Name: caseName,
})
Expand All @@ -275,7 +285,19 @@ func junitXMLTestReport(suite *moduletest.Suite, suiteRunnerStopped bool, source
return buf.Bytes(), nil
}

// getSkipDetails checks data about the test suite, file and runs to determine why a given run was skipped
// failureMessage builds the message attribute for a JUnit <failure> element.
// It summarizes how many of the run's assertions (check rules) failed and
// includes the detail text of the first failed assertion. Returns an empty
// string when there are no failed assertions to report.
func failureMessage(failedAssertions tfdiags.Diagnostics, checkCount int) string {
	failCount := len(failedAssertions)
	if failCount == 0 {
		return ""
	}

	// Both messages lead with the first failure's detail; the wording differs
	// slightly when more than one assertion failed.
	detail := failedAssertions[0].Description().Detail
	if failCount == 1 {
		return fmt.Sprintf("%d of %d assertions failed: %s", failCount, checkCount, detail)
	}
	return fmt.Sprintf("%d of %d assertions failed, including: %s", failCount, checkCount, detail)
}
// Test can be skipped due to:
// 1. terraform test receiving an interrupt from users; all unstarted tests will be skipped
// 2. A previous run in a file has failed, causing subsequent run blocks to be skipped
Expand Down Expand Up @@ -321,3 +343,24 @@ func suiteFilesAsSortedList(files map[string]*moduletest.File) []*moduletest.Fil
}
return sortedFiles
}

// getDiagString renders each diagnostic as plain (uncolored) text at an
// 80-column width and returns the concatenation of all rendered diagnostics.
func getDiagString(diags tfdiags.Diagnostics, sources map[string][]byte) string {
	var b strings.Builder
	for _, diag := range diags {
		b.WriteString(format.DiagnosticPlain(diag, sources, 80))
	}
	return b.String()
}

// getWarnings returns only the diagnostics in diags that have Warning
// severity. The result is always a non-nil (possibly empty) slice, so callers
// can safely range over or measure it.
func getWarnings(diags tfdiags.Diagnostics) tfdiags.Diagnostics {
	// The previous HasWarnings() pre-check was redundant: when there are no
	// warnings the loop appends nothing and we return the empty slice anyway.
	warnDiags := tfdiags.Diagnostics{}
	for _, d := range diags {
		if d.Severity() == tfdiags.Warning {
			warnDiags = warnDiags.Append(d)
		}
	}
	return warnDiags
}
Comment on lines +358 to +369
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Nitpick - AFAICT the code here would work just as well without HasWarnings() - it would return empty slice as nothing would be appended.

On a separate note I am somewhat surprised we don't have Warnings() method yet on tfdiags.Diagnostics - so I think it may be another candidate for logic to extract there and just call from here but feel free to leave this for another PR if you like.

81 changes: 81 additions & 0 deletions internal/command/junit/junit_internal_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,10 @@ import (
"os"
"testing"

"github.com/google/go-cmp/cmp"
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/terraform/internal/moduletest"
"github.com/hashicorp/terraform/internal/tfdiags"
)

func Test_TestJUnitXMLFile_save(t *testing.T) {
Expand Down Expand Up @@ -67,6 +70,66 @@ func Test_TestJUnitXMLFile_save(t *testing.T) {
}
}


func Test_getWarnings(t *testing.T) {

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change

errorDiag := &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "error",
Detail: "this is an error",
}

warnDiag := &hcl.Diagnostic{
Severity: hcl.DiagWarning,
Summary: "warning",
Detail: "this is a warning",
}

cases := map[string]struct {
diags tfdiags.Diagnostics
expected tfdiags.Diagnostics
}{
"empty diags": {
diags: tfdiags.Diagnostics{},
expected: tfdiags.Diagnostics{},
},
"nil diags": {
diags: nil,
expected: tfdiags.Diagnostics{},
},
"all error diags": {
diags: func() tfdiags.Diagnostics {
var d tfdiags.Diagnostics
d = d.Append(errorDiag, errorDiag, errorDiag)
return d
}(),
expected: tfdiags.Diagnostics{},
},
"mixture of error and warning diags": {
diags: func() tfdiags.Diagnostics {
var d tfdiags.Diagnostics
d = d.Append(errorDiag, errorDiag, warnDiag) // 1 warning
return d
}(),
expected: func() tfdiags.Diagnostics {
var d tfdiags.Diagnostics
d = d.Append(warnDiag) // 1 warning
return d
}(),
},
}

for tn, tc := range cases {
t.Run(tn, func(t *testing.T) {
warnings := getWarnings(tc.diags)

if diff := cmp.Diff(tc.expected, warnings, cmp.Comparer(diagnosticComparer)); diff != "" {
t.Errorf("wrong diagnostics\n%s", diff)
}
})
}
}

func Test_suiteFilesAsSortedList(t *testing.T) {
cases := map[string]struct {
Suite *moduletest.Suite
Expand Down Expand Up @@ -151,3 +214,21 @@ func Test_suiteFilesAsSortedList(t *testing.T) {
})
}
}

// From internal/backend/remote-state/s3/testing_test.go
// diagnosticComparer is a Comparer function for use with cmp.Diff to compare two tfdiags.Diagnostic values
Comment on lines +217 to +218
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should/could this be moved into the tfdiags package? Or are there other ways that diagnostics are compared in the code base that are a better option?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think moving it into tfdiags is a good idea!

We could follow the pattern of ctydebug - https://github.com/zclconf/go-cty-debug/blob/0d6042c539401a57fc0cca85ded2861d4a5173c4/ctydebug/cmp.go#L19-L42 I don't mean the implementation necessarily but more the fact that it exports a cmp.Option variable which can be directly used as an argument in cmp.Diff.

// diagnosticComparer is a Comparer function for use with cmp.Diff: it reports
// whether two diagnostics are equivalent by comparing severity, description,
// and the attribute path each diagnostic refers to.
func diagnosticComparer(l, r tfdiags.Diagnostic) bool {
	switch {
	case l.Severity() != r.Severity():
		return false
	case l.Description() != r.Description():
		return false
	}

	lp, rp := tfdiags.GetAttribute(l), tfdiags.GetAttribute(r)
	if len(lp) != len(rp) {
		return false
	}
	return lp.Equals(rp)
}
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?><testsuites>
<testsuite name="main.tftest.hcl" tests="2" skipped="0" failures="1" errors="0">
<testcase name="failing_assertion" classname="main.tftest.hcl" time="TIME_REDACTED" timestamp="TIMESTAMP_REDACTED">
<failure message="local variable &#39;number&#39; has a value greater than zero, so assertion 2 will fail"><![CDATA[
<failure message="1 of 3 assertions failed: local variable &#39;number&#39; has a value greater than zero, so assertion 2 will fail"><![CDATA[
Error: Test assertion failed

on main.tftest.hcl line 7, in run "failing_assertion":
Expand Down