Add support for expected failures to gtest runner.
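
A test opts in by giving its name the "xfail_" prefix; the runner then
expects it to fail. A failing xfail test is reported as XFAIL and counted
alongside the passes; a passing one is reported as XPASS and treated as an
error. A minimal sketch of the convention, using a hypothetical test name
(not one of the tests added while validating this change):

  #include <gtest/gtest.h>

  // The "xfail_" prefix on the test name (not the case name) is what
  // the runner keys on, so this deliberate failure is reported as XFAIL.
  TEST(example, xfail_known_broken) {
    ASSERT_EQ(0, 1);  // fails on purpose
  }
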
Bug: http://b/33251022
Test: manually added xfail_ tests to exercise the XFAIL and XPASS paths
Test: bionic-unit-tests/bionic-unit-tests64 on bullhead
Change-Id: Ie9aef80014b9fd6512b68f0e42abc298732a9b6e
diff --git a/tests/gtest_main.cpp b/tests/gtest_main.cpp
index f0dac7c..b9ee585 100644
--- a/tests/gtest_main.cpp
+++ b/tests/gtest_main.cpp
@@ -170,6 +170,9 @@
void SetResult(TestResult result) { result_ = result; }
TestResult GetResult() const { return result_; }
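+ // Tests named with an "xfail_" prefix are expected to fail;
+ // everything else is expected to pass.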
+ TestResult GetExpectedResult() const {
+ return GetName().find("xfail_") == 0 ? TEST_FAILED : TEST_SUCCESS;
+ }
void SetTestTime(int64_t elapsed_time_ns) { elapsed_time_ns_ = elapsed_time_ns; }
@@ -224,6 +227,15 @@
return test_list_[test_id].GetResult();
}
+ TestResult GetExpectedTestResult(size_t test_id) const {
+ VerifyTestId(test_id);
+ return test_list_[test_id].GetExpectedResult();
+ }
+
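+ // A test counts as successful when its actual result matches its
+ // expected result, so an xfail test must fail to "succeed".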
+ bool GetTestSuccess(size_t test_id) const {
+ return GetTestResult(test_id) == GetExpectedTestResult(test_id);
+ }
+
void SetTestTime(size_t test_id, int64_t elapsed_time_ns) {
VerifyTestId(test_id);
test_list_[test_id].SetTestTime(elapsed_time_ns);
@@ -356,7 +368,7 @@
printf("%s", test_output.c_str());
TestResult result = testcase.GetTestResult(test_id);
- if (result == TEST_SUCCESS) {
+ if (result == testcase.GetExpectedTestResult(test_id)) {
ColoredPrintf(COLOR_GREEN, "[ OK ] ");
} else {
ColoredPrintf(COLOR_RED, "[ FAILED ] ");
@@ -373,10 +385,19 @@
static void OnTestEndPrint(const TestCase& testcase, size_t test_id) {
TestResult result = testcase.GetTestResult(test_id);
+ TestResult expected = testcase.GetExpectedTestResult(test_id);
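+ // Crossing actual and expected results gives four verdicts: OK
+ // (pass, expected pass), XPASS (pass, expected fail), FAILED
+ // (fail, expected pass), and XFAIL (fail, expected fail).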
if (result == TEST_SUCCESS) {
- ColoredPrintf(COLOR_GREEN, "[ OK ] ");
+ if (expected == TEST_SUCCESS) {
+ ColoredPrintf(COLOR_GREEN, "[ OK ] ");
+ } else if (expected == TEST_FAILED) {
+ ColoredPrintf(COLOR_RED, "[ XPASS ] ");
+ }
} else if (result == TEST_FAILED) {
- ColoredPrintf(COLOR_RED, "[ FAILED ] ");
+ if (expected == TEST_SUCCESS) {
+ ColoredPrintf(COLOR_RED, "[ FAILED ] ");
+ } else if (expected == TEST_FAILED) {
+ ColoredPrintf(COLOR_YELLOW, "[ XFAIL ] ");
+ }
} else if (result == TEST_TIMEOUT) {
ColoredPrintf(COLOR_RED, "[ TIMEOUT ] ");
}
@@ -398,6 +419,7 @@
int64_t elapsed_time_ns) {
std::vector<std::string> fail_test_name_list;
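+ // Tests that passed but were expected to fail (XPASS) are errors,
+ // collected separately from ordinary failures.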
+ std::vector<std::string> xpass_test_name_list;
std::vector<std::pair<std::string, int64_t>> timeout_test_list;
// For tests that were slow but didn't time out.
@@ -405,18 +427,28 @@
size_t testcase_count = testcase_list.size();
size_t test_count = 0;
size_t success_test_count = 0;
+ size_t expected_failure_count = 0;
for (const auto& testcase : testcase_list) {
test_count += testcase.TestCount();
for (size_t i = 0; i < testcase.TestCount(); ++i) {
TestResult result = testcase.GetTestResult(i);
- if (result == TEST_SUCCESS) {
- ++success_test_count;
- } else if (result == TEST_FAILED) {
- fail_test_name_list.push_back(testcase.GetTestName(i));
- } else if (result == TEST_TIMEOUT) {
- timeout_test_list.push_back(std::make_pair(testcase.GetTestName(i),
- testcase.GetTestTime(i)));
+ TestResult expected = testcase.GetExpectedTestResult(i);
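+ // A timeout is never an expected result; report it as such before
+ // comparing actual against expected.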
+ if (result == TEST_TIMEOUT) {
+ timeout_test_list.push_back(
+ std::make_pair(testcase.GetTestName(i), testcase.GetTestTime(i)));
+ } else if (result == expected) {
+ if (result == TEST_SUCCESS) {
+ ++success_test_count;
+ } else {
+ ++expected_failure_count;
+ }
+ } else {
+ if (result == TEST_FAILED) {
+ fail_test_name_list.push_back(testcase.GetTestName(i));
+ } else {
+ xpass_test_name_list.push_back(testcase.GetTestName(i));
+ }
}
if (result != TEST_TIMEOUT &&
testcase.GetTestTime(i) / 1000000 >= GetSlowThresholdMs(testcase.GetTestName(i))) {
@@ -435,7 +467,12 @@
}
printf("\n");
ColoredPrintf(COLOR_GREEN, "[ PASS ] ");
- printf("%zu %s.\n", success_test_count, (success_test_count == 1) ? "test" : "tests");
+ printf("%zu %s.", success_test_count, (success_test_count == 1) ? "test" : "tests");
+ if (expected_failure_count > 0) {
+ printf(" (%zu expected failure%s)", expected_failure_count,
+ (expected_failure_count == 1) ? "" : "s");
+ }
+ printf("\n");
// Print tests that timed out.
size_t timeout_test_count = timeout_test_list.size();
@@ -472,7 +509,18 @@
}
}
- if (timeout_test_count > 0 || slow_test_count > 0 || fail_test_count > 0) {
+ // Print tests that should have failed.
+ size_t xpass_test_count = xpass_test_name_list.size();
+ if (xpass_test_count > 0) {
+ ColoredPrintf(COLOR_RED, "[ XPASS ] ");
+ printf("%zu %s, listed below:\n", xpass_test_count, (xpass_test_count == 1) ? "test" : "tests");
+ for (const auto& name : xpass_test_name_list) {
+ ColoredPrintf(COLOR_RED, "[ XPASS ] ");
+ printf("%s\n", name.c_str());
+ }
+ }
+
+ if (timeout_test_count > 0 || slow_test_count > 0 || fail_test_count > 0 || xpass_test_count > 0) {
printf("\n");
}
@@ -485,6 +533,10 @@
if (fail_test_count > 0) {
printf("%2zu FAILED %s\n", fail_test_count, (fail_test_count == 1) ? "TEST" : "TESTS");
}
+ if (xpass_test_count > 0) {
+ printf("%2zu SHOULD HAVE FAILED %s\n", xpass_test_count, (xpass_test_count == 1) ? "TEST" : "TESTS");
+ }
+
fflush(stdout);
}
@@ -540,7 +592,7 @@
auto& testcase = testcase_list[i];
total_test_count += testcase.TestCount();
for (size_t j = 0; j < testcase.TestCount(); ++j) {
- if (testcase.GetTestResult(j) != TEST_SUCCESS) {
+ if (!testcase.GetTestSuccess(j)) {
++failed_count_list[i];
}
elapsed_time_list[i] += testcase.GetTestTime(j);
@@ -568,7 +620,7 @@
fprintf(fp, " <testcase name=\"%s\" status=\"run\" time=\"%.3lf\" classname=\"%s\"",
testcase.GetTest(j).GetName().c_str(), testcase.GetTestTime(j) / 1e9,
testcase.GetName().c_str());
- if (testcase.GetTestResult(j) == TEST_SUCCESS) {
+ if (testcase.GetTestSuccess(j)) {
fputs(" />\n", fp);
} else {
fputs(">\n", fp);
@@ -927,7 +979,7 @@
if (++finished_test_count_list[testcase_id] == testcase.TestCount()) {
++finished_testcase_count;
}
- if (testcase.GetTestResult(test_id) != TEST_SUCCESS) {
+ if (!testcase.GetTestSuccess(test_id)) {
all_tests_passed = false;
}