Christopher Ferris | 1a99356 | 2018-08-21 12:43:50 -0700 | [diff] [blame^] | 1 | /* |
| 2 | * Copyright (C) 2017 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #include <errno.h> |
| 18 | #include <fcntl.h> |
| 19 | #include <inttypes.h> |
| 20 | #include <poll.h> |
| 21 | #include <signal.h> |
| 22 | #include <stdio.h> |
| 23 | #include <string.h> |
| 24 | #include <unistd.h> |
| 25 | |
| 26 | #include <atomic> |
| 27 | #include <string> |
| 28 | #include <tuple> |
| 29 | #include <vector> |
| 30 | |
| 31 | #include <android-base/logging.h> |
| 32 | #include <android-base/strings.h> |
| 33 | #include <android-base/unique_fd.h> |
| 34 | #include <gtest/gtest.h> |
| 35 | |
| 36 | #include "Color.h" |
| 37 | #include "Isolate.h" |
| 38 | #include "NanoTime.h" |
| 39 | #include "Test.h" |
| 40 | |
| 41 | namespace android { |
| 42 | namespace gtest_extras { |
| 43 | |
// Last signal delivered to the handler below; 0 means "none pending".
// Consumed (and reset) by Isolate::HandleSignals via exchange().
static std::atomic_int g_signal;

// Async-signal-safe handler: only records which signal fired so the main
// loop can react to it outside of signal context.
static void SignalHandler(int sig) {
  g_signal.store(sig);
}
| 49 | |
| 50 | static void RegisterSignalHandler() { |
| 51 | sighandler_t ret = signal(SIGINT, SignalHandler); |
| 52 | if (ret == SIG_ERR) { |
| 53 | PLOG(FATAL) << "Setting up SIGINT handler failed"; |
| 54 | } |
| 55 | ret = signal(SIGQUIT, SignalHandler); |
| 56 | if (ret == SIG_ERR) { |
| 57 | PLOG(FATAL) << "Setting up SIGQUIT handler failed"; |
| 58 | } |
| 59 | } |
| 60 | |
| 61 | static void UnregisterSignalHandler() { |
| 62 | sighandler_t ret = signal(SIGINT, SIG_DFL); |
| 63 | if (ret == SIG_ERR) { |
| 64 | PLOG(FATAL) << "Disabling SIGINT handler failed"; |
| 65 | } |
| 66 | ret = signal(SIGQUIT, SIG_DFL); |
| 67 | if (ret == SIG_ERR) { |
| 68 | PLOG(FATAL) << "Disabling SIGQUIT handler failed"; |
| 69 | } |
| 70 | } |
| 71 | |
// Builds "<value><name>" and appends 's' (or 'S' when |uppercase|) if
// value != 1, e.g. (2, " test") -> "2 tests", (1, " job") -> "1 job".
static std::string PluralizeString(size_t value, const char* name, bool uppercase = false) {
  std::string result = std::to_string(value) + name;
  if (value != 1) {
    result += uppercase ? 'S' : 's';
  }
  return result;
}
| 83 | |
// Runs the test binary with --gtest_list_tests (plus any --gtest_filter)
// and parses its output to populate tests_, total_tests_, total_cases_ and
// total_disable_tests_.
void Isolate::EnumerateTests() {
  // Only apply --gtest_filter if present. This is the only option that changes
  // what tests are listed.
  std::string command(child_args_[0]);
  if (!options_.filter().empty()) {
    command += " --gtest_filter=" + options_.filter();
  }
  command += " --gtest_list_tests";
  // "e" sets O_CLOEXEC on the pipe fd (glibc/bionic popen extension).
  FILE* fp = popen(command.c_str(), "re");
  if (fp == nullptr) {
    PLOG(FATAL) << "Unexpected failure from popen";
  }

  // Expected listing format: an unindented "CaseName." line followed by
  // two-space-indented test names belonging to that case.
  bool skip_until_next_case = false;
  std::string case_name;
  char* buffer = nullptr;  // getline-managed; freed after the loop.
  size_t buffer_len = 0;
  bool new_case = false;
  // getline returns -1 at EOF/error, ending the loop.
  while (getline(&buffer, &buffer_len, fp) > 0) {
    if (buffer[0] != ' ') {
      // This is the case name.
      case_name = buffer;
      // Keep only the text before the first space, then drop the newline.
      auto space_index = case_name.find(' ');
      if (space_index != std::string::npos) {
        case_name.erase(space_index);
      }
      if (case_name.back() == '\n') {
        case_name.resize(case_name.size() - 1);
      }

      if (!options_.allow_disabled_tests() && android::base::StartsWith(case_name, "DISABLED_")) {
        // This whole set of tests have been disabled, skip them all.
        skip_until_next_case = true;
      } else {
        new_case = true;
        skip_until_next_case = false;
      }
    } else if (buffer[0] == ' ' && buffer[1] == ' ') {
      // Indented line: a test name within the current case.
      if (!skip_until_next_case) {
        std::string test_name = &buffer[2];
        // Same trimming as for the case name above.
        auto space_index = test_name.find(' ');
        if (space_index != std::string::npos) {
          test_name.erase(space_index);
        }
        if (test_name.back() == '\n') {
          test_name.resize(test_name.size() - 1);
        }
        if (options_.allow_disabled_tests() || !android::base::StartsWith(test_name, "DISABLED_")) {
          tests_.push_back(std::make_tuple(case_name, test_name));
          total_tests_++;
          if (new_case) {
            // Only increment the number of cases when we find at least one test
            // for the cases.
            total_cases_++;
            new_case = false;
          }
        } else {
          // Individually disabled test.
          total_disable_tests_++;
        }
      } else {
        // Test inside a disabled case.
        total_disable_tests_++;
      }
    } else {
      // Anything else means the listing format is not what we expect.
      printf("Unexpected output from test listing.\nCommand:\n%s\nLine:\n%s\n", command.c_str(),
             buffer);
      exit(1);
    }
  }
  free(buffer);
  if (pclose(fp) == -1) {
    PLOG(FATAL) << "Unexpected failure from pclose";
  }
}
| 157 | |
| 158 | int Isolate::ChildProcessFn(const std::tuple<std::string, std::string>& test) { |
| 159 | // Make sure the filter is only coming from our command-line option. |
| 160 | unsetenv("GTEST_FILTER"); |
| 161 | |
| 162 | // Add the filter argument. |
| 163 | std::vector<const char*> args(child_args_); |
| 164 | std::string filter("--gtest_filter=" + GetTestName(test)); |
| 165 | args.push_back(filter.c_str()); |
| 166 | |
| 167 | int argc = args.size(); |
| 168 | // Add the null terminator. |
| 169 | args.push_back(nullptr); |
| 170 | ::testing::InitGoogleTest(&argc, const_cast<char**>(args.data())); |
| 171 | return RUN_ALL_TESTS(); |
| 172 | } |
| 173 | |
// Forks one child per free run slot until every slot is used or every test
// has been launched. Each child's stdout/stderr is redirected into a pipe
// that the parent polls in ReadTestsOutput.
void Isolate::LaunchTests() {
  while (!running_indices_.empty() && cur_test_index_ < tests_.size()) {
    android::base::unique_fd read_fd, write_fd;
    if (!Pipe(&read_fd, &write_fd)) {
      PLOG(FATAL) << "Unexpected failure from pipe";
    }
    // Non-blocking reads let ReadTestsOutput drain without stalling.
    if (fcntl(read_fd.get(), F_SETFL, O_NONBLOCK) == -1) {
      PLOG(FATAL) << "Unexpected failure from fcntl";
    }

    pid_t pid = fork();
    if (pid == -1) {
      PLOG(FATAL) << "Unexpected failure from fork";
    }
    if (pid == 0) {
      // Child: close the read end and route stdout/stderr into the pipe.
      read_fd.reset();
      close(STDOUT_FILENO);
      close(STDERR_FILENO);
      if (dup2(write_fd, STDOUT_FILENO) == -1) {
        // stdout is already gone, so there is no way to report; just exit.
        exit(1);
      }
      if (dup2(write_fd, STDERR_FILENO) == -1) {
        exit(1);
      }
      // Children run with default signal dispositions.
      UnregisterSignalHandler();
      exit(ChildProcessFn(tests_[cur_test_index_]));
    }

    // Parent: claim a free run slot and index the new Test by slot, pid
    // and test index. Test takes ownership of the pipe's read end; the
    // write end closes when write_fd goes out of scope at loop end.
    size_t run_index = running_indices_.top();
    running_indices_.pop();
    Test* test = new Test(tests_[cur_test_index_], cur_test_index_, run_index, read_fd.release());
    running_[run_index] = test;
    running_by_pid_[pid] = test;
    running_by_test_index_[cur_test_index_] = test;

    pollfd* pollfd = &running_pollfds_[run_index];
    pollfd->fd = test->fd();
    pollfd->events = POLLIN;
    cur_test_index_++;
  }
}
| 215 | |
| 216 | void Isolate::ReadTestsOutput() { |
| 217 | int ready = poll(running_pollfds_.data(), running_pollfds_.size(), 0); |
| 218 | if (ready <= 0) { |
| 219 | return; |
| 220 | } |
| 221 | |
| 222 | for (size_t i = 0; i < running_pollfds_.size(); i++) { |
| 223 | pollfd* pfd = &running_pollfds_[i]; |
| 224 | if (pfd->revents & POLLIN) { |
| 225 | Test* test = running_[i]; |
| 226 | if (!test->Read()) { |
| 227 | test->CloseFd(); |
| 228 | pfd->fd = 0; |
| 229 | pfd->events = 0; |
| 230 | } |
| 231 | } |
| 232 | pfd->revents = 0; |
| 233 | } |
| 234 | } |
| 235 | |
| 236 | size_t Isolate::CheckTestsFinished() { |
| 237 | size_t finished_tests = 0; |
| 238 | int status; |
| 239 | pid_t pid; |
| 240 | while ((pid = TEMP_FAILURE_RETRY(waitpid(-1, &status, WNOHANG))) > 0) { |
| 241 | auto entry = running_by_pid_.find(pid); |
| 242 | if (entry == running_by_pid_.end()) { |
| 243 | LOG(FATAL) << "Pid " << pid << " was not spawned by the isolation framework."; |
| 244 | } |
| 245 | |
| 246 | Test* test = entry->second; |
| 247 | test->Stop(); |
| 248 | |
| 249 | // Read any leftover data. |
| 250 | test->ReadUntilClosed(); |
| 251 | if (test->result() == TEST_NONE) { |
| 252 | if (WIFSIGNALED(status)) { |
| 253 | std::string output(test->name() + " terminated by signal: " + strsignal(WTERMSIG(status)) + |
| 254 | ".\n"); |
| 255 | test->AppendOutput(output); |
| 256 | test->set_result(TEST_FAIL); |
| 257 | } else { |
| 258 | int exit_code = WEXITSTATUS(status); |
| 259 | if (exit_code != 0) { |
| 260 | std::string output(test->name() + " exited with exitcode " + std::to_string(exit_code) + |
| 261 | ".\n"); |
| 262 | test->AppendOutput(output); |
| 263 | test->set_result(TEST_FAIL); |
| 264 | } else { |
| 265 | test->set_result(TEST_PASS); |
| 266 | } |
| 267 | } |
| 268 | } else if (test->result() == TEST_TIMEOUT) { |
| 269 | uint64_t time_ms = options_.deadline_threshold_ms(); |
| 270 | std::string timeout_str(test->name() + " killed because of timeout at " + |
| 271 | std::to_string(time_ms) + " ms.\n"); |
| 272 | test->AppendOutput(timeout_str); |
| 273 | } |
| 274 | |
| 275 | if (test->ExpectFail()) { |
| 276 | if (test->result() == TEST_FAIL) { |
| 277 | // The test is expected to fail, it failed. |
| 278 | test->set_result(TEST_XFAIL); |
| 279 | } else if (test->result() == TEST_PASS) { |
| 280 | // The test is expected to fail, it passed. |
| 281 | test->set_result(TEST_XPASS); |
| 282 | } |
| 283 | } |
| 284 | |
| 285 | test->Print(options_.gtest_format()); |
| 286 | |
| 287 | switch (test->result()) { |
| 288 | case TEST_PASS: |
| 289 | total_pass_tests_++; |
| 290 | if (test->slow()) { |
| 291 | total_slow_tests_++; |
| 292 | } |
| 293 | break; |
| 294 | case TEST_XPASS: |
| 295 | total_xpass_tests_++; |
| 296 | break; |
| 297 | case TEST_FAIL: |
| 298 | total_fail_tests_++; |
| 299 | break; |
| 300 | case TEST_TIMEOUT: |
| 301 | total_timeout_tests_++; |
| 302 | break; |
| 303 | case TEST_XFAIL: |
| 304 | total_xfail_tests_++; |
| 305 | break; |
| 306 | case TEST_NONE: |
| 307 | LOG(FATAL) << "Test result is TEST_NONE, this should not be possible."; |
| 308 | } |
| 309 | finished_tests++; |
| 310 | size_t test_index = test->test_index(); |
| 311 | finished_.emplace(test_index, test); |
| 312 | running_indices_.push(test->run_index()); |
| 313 | |
| 314 | // Remove it from all of the running indices. |
| 315 | size_t run_index = test->run_index(); |
| 316 | if (running_by_pid_.erase(pid) != 1) { |
| 317 | printf("Internal error: Erasing pid %d from running_by_pid_ incorrect\n", pid); |
| 318 | } |
| 319 | if (running_by_test_index_.erase(test_index) == 0) { |
| 320 | printf("Internal error: Erasing test_index %zu from running_by_pid_ incorrect\n", test_index); |
| 321 | } |
| 322 | running_[run_index] = nullptr; |
| 323 | running_pollfds_[run_index] = {}; |
| 324 | } |
| 325 | |
| 326 | // The only valid error case is if ECHILD is returned because there are |
| 327 | // no more processes left running. |
| 328 | if (pid == -1 && errno != ECHILD) { |
| 329 | PLOG(FATAL) << "Unexpected failure from waitpid"; |
| 330 | } |
| 331 | return finished_tests; |
| 332 | } |
| 333 | |
| 334 | void Isolate::CheckTestsTimeout() { |
| 335 | for (auto& entry : running_by_pid_) { |
| 336 | Test* test = entry.second; |
| 337 | if (test->result() == TEST_TIMEOUT) { |
| 338 | continue; |
| 339 | } |
| 340 | |
| 341 | if (NanoTime() > test->start_ns() + deadline_threshold_ns_) { |
| 342 | test->set_result(TEST_TIMEOUT); |
| 343 | // Do not mark this as slow and timed out. |
| 344 | test->set_slow(false); |
| 345 | // Test gets cleaned up in CheckTestsFinished. |
| 346 | kill(entry.first, SIGKILL); |
| 347 | } else if (!test->slow() && NanoTime() > test->start_ns() + slow_threshold_ns_) { |
| 348 | // Mark the test as running slow. |
| 349 | test->set_slow(true); |
| 350 | } |
| 351 | } |
| 352 | } |
| 353 | |
| 354 | void Isolate::HandleSignals() { |
| 355 | int signal = g_signal.exchange(0); |
| 356 | if (signal == SIGINT) { |
| 357 | printf("Terminating due to signal...\n"); |
| 358 | for (auto& entry : running_by_pid_) { |
| 359 | kill(entry.first, SIGKILL); |
| 360 | } |
| 361 | exit(1); |
| 362 | } else if (signal == SIGQUIT) { |
| 363 | printf("List of current running tests:\n"); |
| 364 | for (const auto& entry : running_by_test_index_) { |
| 365 | const Test* test = entry.second; |
| 366 | uint64_t run_time_ms = (NanoTime() - test->start_ns()) / kNsPerMs; |
| 367 | printf(" %s (elapsed time %" PRId64 " ms)\n", test->name().c_str(), run_time_ms); |
| 368 | } |
| 369 | } |
| 370 | } |
| 371 | |
| 372 | void Isolate::RunAllTests() { |
| 373 | total_pass_tests_ = 0; |
| 374 | total_xpass_tests_ = 0; |
| 375 | total_fail_tests_ = 0; |
| 376 | total_xfail_tests_ = 0; |
| 377 | total_timeout_tests_ = 0; |
| 378 | total_slow_tests_ = 0; |
| 379 | |
| 380 | running_by_test_index_.clear(); |
| 381 | |
| 382 | size_t job_count = options_.job_count(); |
| 383 | running_.clear(); |
| 384 | running_.resize(job_count); |
| 385 | running_pollfds_.resize(job_count); |
| 386 | memset(running_pollfds_.data(), 0, running_pollfds_.size() * sizeof(pollfd)); |
| 387 | for (size_t i = 0; i < job_count; i++) { |
| 388 | running_indices_.push(i); |
| 389 | } |
| 390 | |
| 391 | finished_.clear(); |
| 392 | |
| 393 | size_t finished = 0; |
| 394 | cur_test_index_ = 0; |
| 395 | while (finished < tests_.size()) { |
| 396 | LaunchTests(); |
| 397 | |
| 398 | ReadTestsOutput(); |
| 399 | |
| 400 | finished += CheckTestsFinished(); |
| 401 | |
| 402 | CheckTestsTimeout(); |
| 403 | |
| 404 | HandleSignals(); |
| 405 | |
| 406 | usleep(MIN_USECONDS_WAIT); |
| 407 | } |
| 408 | } |
| 409 | |
| 410 | void Isolate::PrintResults(size_t total, const char* color, const char* prefix, const char* title, |
| 411 | std::string* footer, bool (*match_func)(const Test&), |
| 412 | void (*print_func)(const Options&, const Test&)) { |
| 413 | ColoredPrintf(color, prefix); |
| 414 | printf(" %s, listed below:\n", PluralizeString(total, " test").c_str()); |
| 415 | for (const auto& entry : finished_) { |
| 416 | const Test* test = entry.second; |
| 417 | if (match_func(*test)) { |
| 418 | ColoredPrintf(color, prefix); |
| 419 | printf(" %s", test->name().c_str()); |
| 420 | if (print_func) { |
| 421 | print_func(options_, *test); |
| 422 | } |
| 423 | printf("\n"); |
| 424 | } |
| 425 | } |
| 426 | if (total < 10) { |
| 427 | *footer += ' '; |
| 428 | } |
| 429 | *footer += PluralizeString(total, (std::string(" ") + title + " TEST").c_str(), true) + '\n'; |
| 430 | } |
| 431 | |
| 432 | void Isolate::PrintFooter(uint64_t elapsed_time_ns) { |
| 433 | ColoredPrintf(COLOR_GREEN, "[==========]"); |
| 434 | printf(" %s from %s ran. (%" PRId64 " ms total)\n", |
| 435 | PluralizeString(total_tests_, " test").c_str(), |
| 436 | PluralizeString(total_cases_, " test case").c_str(), elapsed_time_ns / kNsPerMs); |
| 437 | |
| 438 | ColoredPrintf(COLOR_GREEN, "[ PASS ]"); |
| 439 | printf(" %s.", PluralizeString(total_pass_tests_ + total_xfail_tests_, " test").c_str()); |
| 440 | if (total_xfail_tests_ != 0) { |
| 441 | printf(" (%s)", PluralizeString(total_xfail_tests_, " expected failure").c_str()); |
| 442 | } |
| 443 | printf("\n"); |
| 444 | |
| 445 | std::string footer; |
| 446 | // Tests that ran slow. |
| 447 | if (total_slow_tests_ != 0) { |
| 448 | PrintResults(total_slow_tests_, COLOR_YELLOW, "[ SLOW ]", "SLOW", &footer, |
| 449 | [](const Test& test) { return test.slow(); }, |
| 450 | [](const Options& options, const Test& test) { |
| 451 | printf(" (%" PRIu64 " ms, exceeded %" PRIu64 " ms)", test.RunTimeNs() / kNsPerMs, |
| 452 | options.slow_threshold_ms()); |
| 453 | }); |
| 454 | } |
| 455 | |
| 456 | // Tests that passed but should have failed. |
| 457 | if (total_xpass_tests_ != 0) { |
| 458 | PrintResults(total_xpass_tests_, COLOR_RED, "[ XPASS ]", "SHOULD HAVE FAILED", &footer, |
| 459 | [](const Test& test) { return test.result() == TEST_XPASS; }, nullptr); |
| 460 | } |
| 461 | |
| 462 | // Tests that timed out. |
| 463 | if (total_timeout_tests_ != 0) { |
| 464 | PrintResults(total_timeout_tests_, COLOR_RED, "[ TIMEOUT ]", "TIMEOUT", &footer, |
| 465 | [](const Test& test) { return test.result() == TEST_TIMEOUT; }, |
| 466 | [](const Options&, const Test& test) { |
| 467 | printf(" (stopped at %" PRIu64 " ms)", test.RunTimeNs() / kNsPerMs); |
| 468 | }); |
| 469 | } |
| 470 | |
| 471 | // Tests that failed. |
| 472 | if (total_fail_tests_ != 0) { |
| 473 | PrintResults(total_fail_tests_, COLOR_RED, "[ FAIL ]", "FAILED", &footer, |
| 474 | [](const Test& test) { return test.result() == TEST_FAIL; }, nullptr); |
| 475 | } |
| 476 | |
| 477 | if (!footer.empty()) { |
| 478 | printf("\n%s", footer.c_str()); |
| 479 | } |
| 480 | |
| 481 | if (total_disable_tests_ != 0) { |
| 482 | if (footer.empty()) { |
| 483 | printf("\n"); |
| 484 | } |
| 485 | ColoredPrintf(COLOR_YELLOW, " YOU HAVE %s\n\n", |
| 486 | PluralizeString(total_disable_tests_, " DISABLED TEST", true).c_str()); |
| 487 | } |
| 488 | |
| 489 | fflush(stdout); |
| 490 | } |
| 491 | |
// Escapes the five XML special characters in |xml| so the result can be
// embedded safely in an XML attribute value or text node.
std::string XmlEscape(const std::string& xml) {
  std::string escaped;
  escaped.reserve(xml.size());

  for (char c : xml) {
    if (c == '<') {
      escaped += "&lt;";
    } else if (c == '>') {
      escaped += "&gt;";
    } else if (c == '&') {
      escaped += "&amp;";
    } else if (c == '\'') {
      escaped += "&apos;";
    } else if (c == '"') {
      escaped += "&quot;";
    } else {
      escaped += c;
    }
  }

  return escaped;
}
| 521 | |
| 522 | class TestResultPrinter : public ::testing::EmptyTestEventListener { |
| 523 | public: |
| 524 | TestResultPrinter() : pinfo_(nullptr) {} |
| 525 | virtual void OnTestStart(const ::testing::TestInfo& test_info) { |
| 526 | pinfo_ = &test_info; // Record test_info for use in OnTestPartResult. |
| 527 | } |
| 528 | virtual void OnTestPartResult(const ::testing::TestPartResult& result); |
| 529 | |
| 530 | private: |
| 531 | const ::testing::TestInfo* pinfo_; |
| 532 | }; |
| 533 | |
// Called after an assertion failure.
// Prints the file/line, test name and message for any non-success part.
// NOTE(review): assumes OnTestStart ran first so pinfo_ is non-null --
// gtest calls listeners in that order for results raised inside a test.
void TestResultPrinter::OnTestPartResult(const ::testing::TestPartResult& result) {
  // If the test part succeeded, we don't need to do anything.
  if (result.type() == ::testing::TestPartResult::kSuccess) {
    return;
  }

  // Print failure message from the assertion (e.g. expected this and got that).
  printf("%s:(%d) Failure in test %s.%s\n%s\n", result.file_name(), result.line_number(),
         pinfo_->test_case_name(), pinfo_->name(), result.message());
  fflush(stdout);
}
| 546 | |
// Output xml file when --gtest_output is used, write this function as we can't reuse
// gtest.cc:XmlUnitTestResultPrinter. The reason is XmlUnitTestResultPrinter is totally
// defined in gtest.cc and not expose to outside. What's more, as we don't run gtest in
// the parent process, we don't have gtest classes which are needed by XmlUnitTestResultPrinter.
void Isolate::WriteXmlResults(uint64_t elapsed_time_ns, time_t start_time) {
  FILE* fp = fopen(options_.xml_file().c_str(), "w");
  if (fp == nullptr) {
    printf("Cannot open xml file '%s': %s\n", options_.xml_file().c_str(), strerror(errno));
    exit(1);
  }

  // ISO-8601-style timestamp of the run start, in local time.
  const tm* time_struct = localtime(&start_time);
  if (time_struct == nullptr) {
    PLOG(FATAL) << "Unexpected failure from localtime";
  }
  char timestamp[40];
  snprintf(timestamp, sizeof(timestamp), "%4d-%02d-%02dT%02d:%02d:%02d",
           time_struct->tm_year + 1900, time_struct->tm_mon + 1, time_struct->tm_mday,
           time_struct->tm_hour, time_struct->tm_min, time_struct->tm_sec);

  fputs("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n", fp);
  // Timeouts and unexpected passes are counted as failures at the top level.
  fprintf(fp, "<testsuites tests=\"%zu\" failures=\"%zu\" disabled=\"0\" errors=\"0\"",
          tests_.size(), total_fail_tests_ + total_timeout_tests_ + total_xpass_tests_);
  // NOTE(review): "time" is written as elapsed ns / kNsPerMs; gtest's own XML
  // reports seconds here -- confirm the intended unit for consumers.
  fprintf(fp, " timestamp=\"%s\" time=\"%.3lf\" name=\"AllTests\">\n", timestamp,
          double(elapsed_time_ns) / kNsPerMs);

  // Construct the cases information.
  // Consecutive finished tests with the same case name are grouped into one
  // CaseInfo, accumulating failure counts and elapsed time. XFAIL tests are
  // omitted entirely from the report.
  struct CaseInfo {
    std::string case_name;           // Case name without its trailing character ('.').
    size_t fails = 0;                // Tests in the case that did not pass.
    double elapsed_ms = 0;           // Sum of test run times, in ms.
    std::vector<const Test*> tests;  // Not owned.
  };
  std::string last_case_name;
  std::vector<CaseInfo> cases;
  CaseInfo* info = nullptr;
  for (const auto& entry : finished_) {
    const Test* test = entry.second;
    const std::string& case_name = test->case_name();
    if (test->result() == TEST_XFAIL) {
      // Skip XFAIL tests.
      continue;
    }
    if (last_case_name != case_name) {
      CaseInfo case_info{.case_name = case_name.substr(0, case_name.size() - 1)};
      last_case_name = case_name;
      cases.push_back(case_info);
      info = &cases.back();
    }
    info->tests.push_back(test);
    info->elapsed_ms += double(test->RunTimeNs()) / kNsPerMs;
    if (test->result() != TEST_PASS) {
      info->fails++;
    }
  }

  // Emit one <testsuite> per case, with a <testcase> per test and a
  // <failure> element carrying the escaped output for non-passing tests.
  for (auto& case_entry : cases) {
    fprintf(fp,
            " <testsuite name=\"%s\" tests=\"%zu\" failures=\"%zu\" disabled=\"0\" errors=\"0\"",
            case_entry.case_name.c_str(), case_entry.tests.size(), case_entry.fails);
    fprintf(fp, " time=\"%.3lf\">\n", case_entry.elapsed_ms);

    for (auto test : case_entry.tests) {
      fprintf(fp, " <testcase name=\"%s\" status=\"run\" time=\"%.3lf\" classname=\"%s\"",
              test->test_name().c_str(), double(test->RunTimeNs()) / kNsPerMs,
              case_entry.case_name.c_str());
      if (test->result() == TEST_PASS) {
        fputs(" />\n", fp);
      } else {
        fputs(">\n", fp);
        const std::string escaped_output = XmlEscape(test->output());
        fprintf(fp, " <failure message=\"%s\" type=\"\">\n", escaped_output.c_str());
        fputs(" </failure>\n", fp);
        fputs(" </testcase>\n", fp);
      }
    }
    fputs(" </testsuite>\n", fp);
  }
  fputs("</testsuites>\n", fp);
  fclose(fp);
}
| 628 | |
// Top-level driver: enumerates tests, installs the failure-only printer and
// signal handlers, then runs all tests for the configured number of
// iterations. Returns 0 only if every test passed (or xfailed) in every
// iteration.
int Isolate::Run() {
  slow_threshold_ns_ = options_.slow_threshold_ms() * kNsPerMs;
  deadline_threshold_ns_ = options_.deadline_threshold_ms() * kNsPerMs;

  EnumerateTests();

  // Stop default result printer to avoid environment setup/teardown information for each test.
  ::testing::UnitTest::GetInstance()->listeners().Release(
      ::testing::UnitTest::GetInstance()->listeners().default_result_printer());
  ::testing::UnitTest::GetInstance()->listeners().Append(new TestResultPrinter);
  RegisterSignalHandler();

  std::string job_info("Running " + PluralizeString(total_tests_, " test") + " from " +
                       PluralizeString(total_cases_, " test case") + " (" +
                       PluralizeString(options_.job_count(), " job") + ").");

  int exit_code = 0;
  // A negative num_iterations means repeat indefinitely.
  for (int i = 0; options_.num_iterations() < 0 || i < options_.num_iterations(); i++) {
    if (i > 0) {
      printf("\nRepeating all tests (iteration %d) . . .\n\n", i + 1);
    }
    ColoredPrintf(COLOR_GREEN, "[==========]");
    printf(" %s\n", job_info.c_str());
    fflush(stdout);

    time_t start_time = time(nullptr);
    uint64_t time_ns = NanoTime();
    RunAllTests();
    time_ns = NanoTime() - time_ns;

    PrintFooter(time_ns);

    if (!options_.xml_file().empty()) {
      WriteXmlResults(time_ns, start_time);
    }

    // Any test that neither passed nor xfailed makes the run a failure.
    if (total_pass_tests_ + total_xfail_tests_ != tests_.size()) {
      exit_code = 1;
    }
  }

  return exit_code;
}
| 672 | |
| 673 | } // namespace gtest_extras |
| 674 | } // namespace android |