1 //
2 // Copyright 2019 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // TestSuite:
7 // Basic implementation of a test harness in ANGLE.
8
9 #include "TestSuite.h"
10
11 #include "common/debug.h"
12 #include "common/hash_containers.h"
13 #include "common/platform.h"
14 #include "common/string_utils.h"
15 #include "common/system_utils.h"
16 #include "util/Timer.h"
17
18 #include <stdlib.h>
19 #include <time.h>
20
21 #include <fstream>
22 #include <unordered_map>
23
24 #include <gtest/gtest.h>
25 #include <rapidjson/document.h>
26 #include <rapidjson/filewritestream.h>
27 #include <rapidjson/istreamwrapper.h>
28 #include <rapidjson/prettywriter.h>
29
30 // We directly call into a function to register the parameterized tests. This saves spinning up
31 // a subprocess with a new gtest filter.
32 #include <gtest/../../src/gtest-internal-inl.h>
33
34 namespace js = rapidjson;
35
36 namespace angle
37 {
38 namespace
39 {
40 constexpr char kBatchId[] = "--batch-id";
41 constexpr char kFilterFileArg[] = "--filter-file";
42 constexpr char kResultFileArg[] = "--results-file";
43 constexpr char kTestTimeoutArg[] = "--test-timeout";
44 constexpr char kDisableCrashHandler[] = "--disable-crash-handler";
45 constexpr char kIsolatedOutDir[] = "--isolated-outdir";
46
47 constexpr char kStartedTestString[] = "[ RUN ] ";
48 constexpr char kPassedTestString[] = "[ OK ] ";
49 constexpr char kFailedTestString[] = "[ FAILED ] ";
50 constexpr char kSkippedTestString[] = "[ SKIPPED ] ";
51
52 constexpr char kArtifactsFakeTestName[] = "TestArtifactsFakeTest";
53
54 constexpr char kTSanOptionsEnvVar[] = "TSAN_OPTIONS";
55 constexpr char kUBSanOptionsEnvVar[] = "UBSAN_OPTIONS";
56
57 [[maybe_unused]] constexpr char kVkLoaderDisableDLLUnloadingEnvVar[] =
58 "VK_LOADER_DISABLE_DYNAMIC_LIBRARY_UNLOADING";
59
60 // Note: we use a fairly high test timeout to allow for the first test in a batch to be slow.
61 // Ideally we could use a separate timeout for the slow first test.
62 // Allow sanitized tests to run more slowly.
63 #if defined(NDEBUG) && !defined(ANGLE_WITH_SANITIZER)
64 constexpr int kDefaultTestTimeout = 60;
65 constexpr int kDefaultBatchTimeout = 300;
66 #else
67 constexpr int kDefaultTestTimeout = 120;
68 constexpr int kDefaultBatchTimeout = 700;
69 #endif
70 constexpr int kSlowTestTimeoutScale = 3;
71 constexpr int kDefaultBatchSize = 256;
72 constexpr double kIdleMessageTimeout = 15.0;
73 constexpr int kDefaultMaxProcesses = 16;
74 constexpr int kDefaultMaxFailures = 100;
75
76 const char *ResultTypeToString(TestResultType type)
77 {
78 switch (type)
79 {
80 case TestResultType::Crash:
81 return "CRASH";
82 case TestResultType::Fail:
83 return "FAIL";
84 case TestResultType::NoResult:
85 return "NOTRUN";
86 case TestResultType::Pass:
87 return "PASS";
88 case TestResultType::Skip:
89 return "SKIP";
90 case TestResultType::Timeout:
91 return "TIMEOUT";
92 case TestResultType::Unknown:
93 default:
94 return "UNKNOWN";
95 }
96 }
97
98 TestResultType GetResultTypeFromString(const std::string &str)
99 {
100 if (str == "CRASH")
101 return TestResultType::Crash;
102 if (str == "FAIL")
103 return TestResultType::Fail;
104 if (str == "PASS")
105 return TestResultType::Pass;
106 if (str == "NOTRUN")
107 return TestResultType::NoResult;
108 if (str == "SKIP")
109 return TestResultType::Skip;
110 if (str == "TIMEOUT")
111 return TestResultType::Timeout;
112 return TestResultType::Unknown;
113 }
114
115 bool IsFailedResult(TestResultType resultType)
116 {
117 return resultType != TestResultType::Pass && resultType != TestResultType::Skip;
118 }
119
120 js::Value ResultTypeToJSString(TestResultType type, js::Document::AllocatorType *allocator)
121 {
122 js::Value jsName;
123 jsName.SetString(ResultTypeToString(type), *allocator);
124 return jsName;
125 }
126
127 bool WriteJsonFile(const std::string &outputFile, js::Document *doc)
128 {
129 FILE *fp = fopen(outputFile.c_str(), "w");
130 if (!fp)
131 {
132 return false;
133 }
134
135 constexpr size_t kBufferSize = 0xFFFF;
136 std::vector<char> writeBuffer(kBufferSize);
137 js::FileWriteStream os(fp, writeBuffer.data(), kBufferSize);
138 js::PrettyWriter<js::FileWriteStream> writer(os);
139 if (!doc->Accept(writer))
140 {
141 fclose(fp);
142 return false;
143 }
144 fclose(fp);
145 return true;
146 }
147
148 // Writes out a TestResults to the Chromium JSON Test Results format.
149 // https://chromium.googlesource.com/chromium/src.git/+/main/docs/testing/json_test_results_format.md
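// A minimal sketch of the output shape (field values below are hypothetical; the
// structure mirrors what this writer emits):
//   {
//     "interrupted": false,
//     "path_delimiter": ".",
//     "version": 3,
//     "seconds_since_epoch": 1700000000,
//     "num_failures_by_type": { "FAIL": 1, "PASS": 2 },
//     "tests": {
//       "SuiteName.TestName/ES2_Vulkan": {
//         "actual": "FAIL PASS", "expected": "FAIL PASS",
//         "is_flaky": true, "times": [ 0.5, 0.4 ]
//       }
//     }
//   }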
150 void WriteResultsFile(bool interrupted,
151 const TestResults &testResults,
152 const std::string &outputFile)
153 {
154 time_t ltime;
155 time(&ltime);
156 struct tm *timeinfo = gmtime(&ltime);
157 ltime = mktime(timeinfo);
158
159 uint64_t secondsSinceEpoch = static_cast<uint64_t>(ltime);
160
161 js::Document doc;
162 doc.SetObject();
163
164 js::Document::AllocatorType &allocator = doc.GetAllocator();
165
166 doc.AddMember("interrupted", interrupted, allocator);
167 doc.AddMember("path_delimiter", ".", allocator);
168 doc.AddMember("version", 3, allocator);
169 doc.AddMember("seconds_since_epoch", secondsSinceEpoch, allocator);
170
171 js::Value tests;
172 tests.SetObject();
173
174 // If we have any test artifacts, make a fake test to house them.
175 if (!testResults.testArtifactPaths.empty())
176 {
177 js::Value artifactsTest;
178 artifactsTest.SetObject();
179
180 artifactsTest.AddMember("actual", "PASS", allocator);
181 artifactsTest.AddMember("expected", "PASS", allocator);
182
183 js::Value artifacts;
184 artifacts.SetObject();
185
186 for (const std::string &testArtifactPath : testResults.testArtifactPaths)
187 {
188 std::vector<std::string> pieces =
189 SplitString(testArtifactPath, "/\\", WhitespaceHandling::TRIM_WHITESPACE,
190 SplitResult::SPLIT_WANT_NONEMPTY);
191 ASSERT(!pieces.empty());
192
193 js::Value basename;
194 basename.SetString(pieces.back(), allocator);
195
196 js::Value artifactPath;
197 artifactPath.SetString(testArtifactPath, allocator);
198
199 js::Value artifactArray;
200 artifactArray.SetArray();
201 artifactArray.PushBack(artifactPath, allocator);
202
203 artifacts.AddMember(basename, artifactArray, allocator);
204 }
205
206 artifactsTest.AddMember("artifacts", artifacts, allocator);
207
208 js::Value fakeTestName;
209 fakeTestName.SetString(testResults.testArtifactsFakeTestName, allocator);
210 tests.AddMember(fakeTestName, artifactsTest, allocator);
211 }
212
213 std::map<TestResultType, uint32_t> counts;
214
215 for (const auto &resultIter : testResults.results)
216 {
217 const TestIdentifier &id = resultIter.first;
218 const TestResult &result = resultIter.second;
219
220 js::Value jsResult;
221 jsResult.SetObject();
222
223 counts[result.type]++;
224
225 std::string actualResult;
226 for (uint32_t fail = 0; fail < result.flakyFailures; ++fail)
227 {
228 actualResult += "FAIL ";
229 }
230
231 actualResult += ResultTypeToString(result.type);
232
233 std::string expectedResult = "PASS";
234 if (result.type == TestResultType::Skip)
235 {
236 expectedResult = "SKIP";
237 }
238
239 // Handle flaky passing tests.
240 if (result.flakyFailures > 0 && result.type == TestResultType::Pass)
241 {
242 expectedResult = "FAIL PASS";
243 jsResult.AddMember("is_flaky", true, allocator);
244 }
245
246 jsResult.AddMember("actual", actualResult, allocator);
247 jsResult.AddMember("expected", expectedResult, allocator);
248
249 if (IsFailedResult(result.type))
250 {
251 jsResult.AddMember("is_unexpected", true, allocator);
252 }
253
254 js::Value times;
255 times.SetArray();
256 for (double elapsedTimeSeconds : result.elapsedTimeSeconds)
257 {
258 times.PushBack(elapsedTimeSeconds, allocator);
259 }
260
261 jsResult.AddMember("times", times, allocator);
262
263 char testName[500];
264 id.snprintfName(testName, sizeof(testName));
265 js::Value jsName;
266 jsName.SetString(testName, allocator);
267
268 tests.AddMember(jsName, jsResult, allocator);
269 }
270
271 js::Value numFailuresByType;
272 numFailuresByType.SetObject();
273
274 for (const auto &countIter : counts)
275 {
276 TestResultType type = countIter.first;
277 uint32_t count = countIter.second;
278
279 js::Value jsCount(count);
280 numFailuresByType.AddMember(ResultTypeToJSString(type, &allocator), jsCount, allocator);
281 }
282
283 doc.AddMember("num_failures_by_type", numFailuresByType, allocator);
284
285 doc.AddMember("tests", tests, allocator);
286
287 printf("Writing test results to %s\n", outputFile.c_str());
288
289 if (!WriteJsonFile(outputFile, &doc))
290 {
291 printf("Error writing test results file.\n");
292 }
293 }
294
295 void WriteHistogramJson(const HistogramWriter &histogramWriter, const std::string &outputFile)
296 {
297 js::Document doc;
298 doc.SetArray();
299
300 histogramWriter.getAsJSON(&doc);
301
302 printf("Writing histogram json to %s\n", outputFile.c_str());
303
304 if (!WriteJsonFile(outputFile, &doc))
305 {
306 printf("Error writing histogram json file.\n");
307 }
308 }
309
310 void UpdateCurrentTestResult(const testing::TestResult &resultIn, TestResults *resultsOut)
311 {
312 TestResult &resultOut = resultsOut->results[resultsOut->currentTest];
313
314 // Note: Crashes and Timeouts are detected by the crash handler and a watchdog thread.
315 if (resultIn.Skipped())
316 {
317 resultOut.type = TestResultType::Skip;
318 }
319 else if (resultIn.Failed())
320 {
321 resultOut.type = TestResultType::Fail;
322 }
323 else
324 {
325 resultOut.type = TestResultType::Pass;
326 }
327
328 resultOut.elapsedTimeSeconds.back() = resultsOut->currentTestTimer.getElapsedWallClockTime();
329 }
330
331 TestIdentifier GetTestIdentifier(const testing::TestInfo &testInfo)
332 {
333 return {testInfo.test_suite_name(), testInfo.name()};
334 }
335
336 bool IsTestDisabled(const testing::TestInfo &testInfo)
337 {
338 return ::strstr(testInfo.name(), "DISABLED_") == testInfo.name();
339 }
340
341 using TestIdentifierFilter = std::function<bool(const TestIdentifier &id)>;
342
343 std::vector<TestIdentifier> FilterTests(std::map<TestIdentifier, FileLine> *fileLinesOut,
344 TestIdentifierFilter filter,
345 bool alsoRunDisabledTests)
346 {
347 std::vector<TestIdentifier> tests;
348
349 const testing::UnitTest &testProgramInfo = *testing::UnitTest::GetInstance();
350 for (int suiteIndex = 0; suiteIndex < testProgramInfo.total_test_suite_count(); ++suiteIndex)
351 {
352 const testing::TestSuite &testSuite = *testProgramInfo.GetTestSuite(suiteIndex);
353 for (int testIndex = 0; testIndex < testSuite.total_test_count(); ++testIndex)
354 {
355 const testing::TestInfo &testInfo = *testSuite.GetTestInfo(testIndex);
356 TestIdentifier id = GetTestIdentifier(testInfo);
357 if (filter(id) && (!IsTestDisabled(testInfo) || alsoRunDisabledTests))
358 {
359 tests.emplace_back(id);
360
361 if (fileLinesOut)
362 {
363 (*fileLinesOut)[id] = {testInfo.file(), testInfo.line()};
364 }
365 }
366 }
367 }
368
369 return tests;
370 }
371
372 std::vector<TestIdentifier> GetFilteredTests(std::map<TestIdentifier, FileLine> *fileLinesOut,
373 bool alsoRunDisabledTests)
374 {
375 TestIdentifierFilter gtestIDFilter = [](const TestIdentifier &id) {
376 return testing::internal::UnitTestOptions::FilterMatchesTest(id.testSuiteName, id.testName);
377 };
378
379 return FilterTests(fileLinesOut, gtestIDFilter, alsoRunDisabledTests);
380 }
381
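// Illustrative example (shard numbers hypothetical): with shardCount == 4 and
// shardIndex == 1 this selects allTests[1], allTests[5], allTests[9], ..., i.e. each
// shard takes every shardCount-th test starting at its own index.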
382 std::vector<TestIdentifier> GetShardTests(const std::vector<TestIdentifier> &allTests,
383 int shardIndex,
384 int shardCount,
385 std::map<TestIdentifier, FileLine> *fileLinesOut,
386 bool alsoRunDisabledTests)
387 {
388 std::vector<TestIdentifier> shardTests;
389
390 for (int testIndex = shardIndex; testIndex < static_cast<int>(allTests.size());
391 testIndex += shardCount)
392 {
393 shardTests.emplace_back(allTests[testIndex]);
394 }
395
396 return shardTests;
397 }
398
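// Builds the --gtest_filter argument for a batch. Illustrative output (test names
// hypothetical):
//   --gtest_filter=FooTest.Bar/ES2_Vulkan:FooTest.Baz/ES2_Vulkan
// Dashes are replaced with '?' so gtest does not interpret them as the start of a
// negative filter pattern.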
399 std::string GetTestFilter(const std::vector<TestIdentifier> &tests)
400 {
401 std::stringstream filterStream;
402
403 filterStream << "--gtest_filter=";
404
405 for (size_t testIndex = 0; testIndex < tests.size(); ++testIndex)
406 {
407 if (testIndex != 0)
408 {
409 filterStream << ":";
410 }
411
412 filterStream << ReplaceDashesWithQuestionMark(tests[testIndex].testSuiteName) << "."
413 << ReplaceDashesWithQuestionMark(tests[testIndex].testName);
414 }
415
416 return filterStream.str();
417 }
418
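// Reads back the "artifacts" member written for the fake artifacts test above.
// Illustrative shape (file names hypothetical):
//   "artifacts": { "screenshot.png": [ "some/dir/screenshot.png" ] }
// Each member value must be a single-element array containing the artifact path.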
419 bool GetTestArtifactsFromJSON(const js::Value::ConstObject &obj,
420 std::vector<std::string> *testArtifactPathsOut)
421 {
422 if (!obj.HasMember("artifacts"))
423 {
424 printf("No artifacts member.\n");
425 return false;
426 }
427
428 const js::Value &jsArtifacts = obj["artifacts"];
429 if (!jsArtifacts.IsObject())
430 {
431 printf("Artifacts are not an object.\n");
432 return false;
433 }
434
435 const js::Value::ConstObject &artifacts = jsArtifacts.GetObj();
436 for (const auto &artifactMember : artifacts)
437 {
438 const js::Value &artifact = artifactMember.value;
439 if (!artifact.IsArray())
440 {
441 printf("Artifact is not an array of strings of size 1.\n");
442 return false;
443 }
444
445 const js::Value::ConstArray &artifactArray = artifact.GetArray();
446 if (artifactArray.Size() != 1)
447 {
448 printf("Artifact is not an array of strings of size 1.\n");
449 return false;
450 }
451
452 const js::Value &artifactName = artifactArray[0];
453 if (!artifactName.IsString())
454 {
455 printf("Artifact is not an array of strings of size 1.\n");
456 return false;
457 }
458
459 testArtifactPathsOut->push_back(artifactName.GetString());
460 }
461
462 return true;
463 }
464
465 bool GetSingleTestResultFromJSON(const js::Value &name,
466 const js::Value::ConstObject &obj,
467 TestResults *resultsOut)
468 {
469
470 TestIdentifier id;
471 if (!TestIdentifier::ParseFromString(name.GetString(), &id))
472 {
473 printf("Could not parse test identifier.\n");
474 return false;
475 }
476
477 if (!obj.HasMember("expected") || !obj.HasMember("actual"))
478 {
479 printf("No expected or actual member.\n");
480 return false;
481 }
482
483 const js::Value &expected = obj["expected"];
484 const js::Value &actual = obj["actual"];
485
486 if (!expected.IsString() || !actual.IsString())
487 {
488 printf("Expected or actual member is not a string.\n");
489 return false;
490 }
491
492 const std::string actualStr = actual.GetString();
493
494 TestResultType resultType = TestResultType::Unknown;
495 int flakyFailures = 0;
496 if (actualStr.find(' ') != std::string::npos)
497 {
498 std::istringstream strstr(actualStr);
499 std::string token;
500 while (std::getline(strstr, token, ' '))
501 {
502 resultType = GetResultTypeFromString(token);
503 if (resultType == TestResultType::Unknown)
504 {
505 printf("Failed to parse result type.\n");
506 return false;
507 }
508 if (IsFailedResult(resultType))
509 {
510 flakyFailures++;
511 }
512 }
513 }
514 else
515 {
516 resultType = GetResultTypeFromString(actualStr);
517 if (resultType == TestResultType::Unknown)
518 {
519 printf("Failed to parse result type.\n");
520 return false;
521 }
522 }
523
524 std::vector<double> elapsedTimeSeconds;
525 if (obj.HasMember("times"))
526 {
527 const js::Value &times = obj["times"];
528 if (!times.IsArray())
529 {
530 return false;
531 }
532
533 const js::Value::ConstArray &timesArray = times.GetArray();
534 if (timesArray.Size() < 1)
535 {
536 return false;
537 }
538 for (const js::Value &time : timesArray)
539 {
540 if (!time.IsDouble())
541 {
542 return false;
543 }
544
545 elapsedTimeSeconds.push_back(time.GetDouble());
546 }
547 }
548
549 TestResult &result = resultsOut->results[id];
550 result.elapsedTimeSeconds = elapsedTimeSeconds;
551 result.type = resultType;
552 result.flakyFailures = flakyFailures;
553 return true;
554 }
555
556 bool GetTestResultsFromJSON(const js::Document &document, TestResults *resultsOut)
557 {
558 if (!document.HasMember("tests") || !document["tests"].IsObject())
559 {
560 printf("JSON document has no tests member.\n");
561 return false;
562 }
563
564 const js::Value::ConstObject &tests = document["tests"].GetObj();
565 for (const auto &testMember : tests)
566 {
567 // Get test identifier.
568 const js::Value &name = testMember.name;
569 if (!name.IsString())
570 {
571 printf("Name is not a string.\n");
572 return false;
573 }
574
575 // Get test result.
576 const js::Value &value = testMember.value;
577 if (!value.IsObject())
578 {
579 printf("Test result is not an object.\n");
580 return false;
581 }
582
583 const js::Value::ConstObject &obj = value.GetObj();
584
585 if (BeginsWith(name.GetString(), kArtifactsFakeTestName))
586 {
587 if (!GetTestArtifactsFromJSON(obj, &resultsOut->testArtifactPaths))
588 {
589 return false;
590 }
591 }
592 else
593 {
594 if (!GetSingleTestResultFromJSON(name, obj, resultsOut))
595 {
596 return false;
597 }
598 }
599 }
600
601 return true;
602 }
603
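// Merges a batch's results into the accumulated output. Illustrative flaky-retry flow
// (retry count hypothetical): with --flaky-retries=2, a test that fails its first run is
// marked NoResult in the batch results (so it is re-queued) and flakyFailures is
// incremented; once the retries are exhausted, the failing result sticks.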
604 bool MergeTestResults(TestResults *input, TestResults *output, int flakyRetries)
605 {
606 for (auto &resultsIter : input->results)
607 {
608 const TestIdentifier &id = resultsIter.first;
609 TestResult &inputResult = resultsIter.second;
610 TestResult &outputResult = output->results[id];
611
612 if (inputResult.type != TestResultType::NoResult)
613 {
614 if (outputResult.type != TestResultType::NoResult)
615 {
616 printf("Warning: duplicate entry for %s.%s.\n", id.testSuiteName.c_str(),
617 id.testName.c_str());
618 return false;
619 }
620
621 // Mark the tests that haven't exhausted their retries as 'SKIP'. This makes ANGLE
622 // attempt the test again.
623 uint32_t runCount = outputResult.flakyFailures + 1;
624 if (IsFailedResult(inputResult.type) && runCount < static_cast<uint32_t>(flakyRetries))
625 {
626 printf("Retrying flaky test: %s.%s.\n", id.testSuiteName.c_str(),
627 id.testName.c_str());
628 inputResult.type = TestResultType::NoResult;
629 outputResult.flakyFailures++;
630 }
631 else
632 {
633 outputResult.type = inputResult.type;
634 }
635 if (runCount == 1)
636 {
637 outputResult.elapsedTimeSeconds = inputResult.elapsedTimeSeconds;
638 }
639 else
640 {
641 outputResult.elapsedTimeSeconds.insert(outputResult.elapsedTimeSeconds.end(),
642 inputResult.elapsedTimeSeconds.begin(),
643 inputResult.elapsedTimeSeconds.end());
644 }
645 }
646 }
647
648 output->testArtifactPaths.insert(output->testArtifactPaths.end(),
649 input->testArtifactPaths.begin(),
650 input->testArtifactPaths.end());
651
652 return true;
653 }
654
655 void PrintTestOutputSnippet(const TestIdentifier &id,
656 const TestResult &result,
657 const std::string &fullOutput)
658 {
659 std::stringstream nameStream;
660 nameStream << id;
661 std::string fullName = nameStream.str();
662
663 size_t runPos = fullOutput.find(std::string(kStartedTestString) + fullName);
664 if (runPos == std::string::npos)
665 {
666 printf("Cannot locate test output snippet.\n");
667 return;
668 }
669
670 size_t endPos = fullOutput.find(std::string(kFailedTestString) + fullName, runPos);
671 // Only clip the snippet to the "OK" message if the test really
672 // succeeded. It still might have e.g. crashed after printing it.
673 if (endPos == std::string::npos && result.type == TestResultType::Pass)
674 {
675 endPos = fullOutput.find(std::string(kPassedTestString) + fullName, runPos);
676 }
677 if (endPos != std::string::npos)
678 {
679 size_t newline_pos = fullOutput.find("\n", endPos);
680 if (newline_pos != std::string::npos)
681 endPos = newline_pos + 1;
682 }
683
684 std::cout << "\n";
685 if (endPos != std::string::npos)
686 {
687 std::cout << fullOutput.substr(runPos, endPos - runPos);
688 }
689 else
690 {
691 std::cout << fullOutput.substr(runPos);
692 }
693 }
694
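// Illustrative mappings (test names hypothetical): a test named "Run/ES3_Vulkan" yields
// "ES3_Vulkan", "Run/ES2_Metal__LowPower" yields "ES2_Metal", and a name without '/'
// (or whose suffix does not start with "ES") yields "default".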
695 std::string GetConfigNameFromTestIdentifier(const TestIdentifier &id)
696 {
697 size_t slashPos = id.testName.find('/');
698 if (slashPos == std::string::npos)
699 {
700 return "default";
701 }
702
703 size_t doubleUnderscorePos = id.testName.find("__");
704 if (doubleUnderscorePos == std::string::npos)
705 {
706 std::string configName = id.testName.substr(slashPos + 1);
707
708 if (!BeginsWith(configName, "ES"))
709 {
710 return "default";
711 }
712
713 return configName;
714 }
715 else
716 {
717 return id.testName.substr(slashPos + 1, doubleUnderscorePos - slashPos - 1);
718 }
719 }
720
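// Groups tests into batches of at most batchSize, one config per batch. Batches are
// filled by striping: with 10 tests and a batchSize of 4 (hypothetical numbers), three
// batches are created and test i lands in batch i % 3, spreading adjacent (often
// similarly slow) tests across batches.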
721 TestQueue BatchTests(const std::vector<TestIdentifier> &tests, int batchSize)
722 {
723 // First sort tests by configuration.
724 angle::HashMap<std::string, std::vector<TestIdentifier>> testsSortedByConfig;
725 for (const TestIdentifier &id : tests)
726 {
727 std::string config = GetConfigNameFromTestIdentifier(id);
728 testsSortedByConfig[config].push_back(id);
729 }
730
731 // Then group into batches by 'batchSize'.
732 TestQueue testQueue;
733 for (const auto &configAndIds : testsSortedByConfig)
734 {
735 const std::vector<TestIdentifier> &configTests = configAndIds.second;
736
737 // Count the number of batches needed for this config.
738 int batchesForConfig = static_cast<int>(configTests.size() + batchSize - 1) / batchSize;
739
740 // Create batches with striping to split up slow tests.
741 for (int batchIndex = 0; batchIndex < batchesForConfig; ++batchIndex)
742 {
743 std::vector<TestIdentifier> batchTests;
744 for (size_t testIndex = batchIndex; testIndex < configTests.size();
745 testIndex += batchesForConfig)
746 {
747 batchTests.push_back(configTests[testIndex]);
748 }
749 testQueue.emplace(std::move(batchTests));
750 ASSERT(batchTests.empty());
751 }
752 }
753
754 return testQueue;
755 }
756
757 void ListTests(const std::map<TestIdentifier, TestResult> &resultsMap)
758 {
759 std::cout << "Tests list:\n";
760
761 for (const auto &resultIt : resultsMap)
762 {
763 const TestIdentifier &id = resultIt.first;
764 std::cout << id << "\n";
765 }
766
767 std::cout << "End tests list.\n";
768 }
769
770 // Prints the names of the tests matching the user-specified filter flag.
771 // This matches the output from googletest/src/gtest.cc but is much much faster for large filters.
772 // See http://anglebug.com/42263725
773 void GTestListTests(const std::map<TestIdentifier, TestResult> &resultsMap)
774 {
775 std::map<std::string, std::vector<std::string>> suites;
776
777 for (const auto &resultIt : resultsMap)
778 {
779 const TestIdentifier &id = resultIt.first;
780 suites[id.testSuiteName].push_back(id.testName);
781 }
782
783 for (const auto &testSuiteIt : suites)
784 {
785 bool printedTestSuiteName = false;
786
787 const std::string &suiteName = testSuiteIt.first;
788 const std::vector<std::string> &testNames = testSuiteIt.second;
789
790 for (const std::string &testName : testNames)
791 {
792 if (!printedTestSuiteName)
793 {
794 printedTestSuiteName = true;
795 printf("%s.\n", suiteName.c_str());
796 }
797 printf(" %s\n", testName.c_str());
798 }
799 }
800 }
801
802 // On Android, batching is done on the host, i.e. externally.
803 // TestSuite executes on the device and should just pass all args through to GTest.
804 bool UsesExternalBatching()
805 {
806 #if defined(ANGLE_PLATFORM_ANDROID)
807 return true;
808 #else
809 return false;
810 #endif
811 }
812 } // namespace
813
814 void MetricWriter::enable(const std::string &testArtifactDirectory)
815 {
816 mPath = testArtifactDirectory + GetPathSeparator() + "angle_metrics";
817 }
818
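// Each writeInfo() + write*Value() pair appends one JSON object per line to the
// angle_metrics file. Illustrative line (all field values hypothetical):
//   {"name":"TracePerf","backend":"vulkan","story":"some_trace","metric":"wall_time","units":"ms","value":"1.234567"}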
819 void MetricWriter::writeInfo(const std::string &name,
820 const std::string &backend,
821 const std::string &story,
822 const std::string &metric,
823 const std::string &units)
824 {
825 if (mPath.empty())
826 {
827 return;
828 }
829
830 if (mFile == nullptr)
831 {
832 mFile = fopen(mPath.c_str(), "w");
833 }
834 ASSERT(mFile != nullptr);
835
836 fprintf(mFile, "{\"name\":\"%s\",", name.c_str());
837 fprintf(mFile, "\"backend\":\"%s\",", backend.c_str());
838 fprintf(mFile, "\"story\":\"%s\",", story.c_str());
839 fprintf(mFile, "\"metric\":\"%s\",", metric.c_str());
840 fprintf(mFile, "\"units\":\"%s\",", units.c_str());
841 // followed by writing value, so no closing bracket yet
842 }
843
844 void MetricWriter::writeDoubleValue(double value)
845 {
846 if (mFile != nullptr)
847 {
848 fprintf(mFile, "\"value\":\"%lf\"}\n", value);
849 }
850 }
851
852 void MetricWriter::writeIntegerValue(size_t value)
853 {
854 if (mFile != nullptr)
855 {
856 fprintf(mFile, "\"value\":\"%zu\"}\n", value);
857 }
858 }
859
860 void MetricWriter::close()
861 {
862 if (mFile != nullptr)
863 {
864 fclose(mFile);
865 mFile = nullptr;
866 }
867 }
868
869 // static
870 TestSuite *TestSuite::mInstance = nullptr;
871
872 TestIdentifier::TestIdentifier() = default;
873
874 TestIdentifier::TestIdentifier(const std::string &suiteNameIn, const std::string &nameIn)
875 : testSuiteName(suiteNameIn), testName(nameIn)
876 {}
877
878 TestIdentifier::TestIdentifier(const TestIdentifier &other) = default;
879
880 TestIdentifier::~TestIdentifier() = default;
881
882 TestIdentifier &TestIdentifier::operator=(const TestIdentifier &other) = default;
883
884 void TestIdentifier::snprintfName(char *outBuffer, size_t maxLen) const
885 {
886 snprintf(outBuffer, maxLen, "%s.%s", testSuiteName.c_str(), testName.c_str());
887 }
888
889 // static
890 bool TestIdentifier::ParseFromString(const std::string &str, TestIdentifier *idOut)
891 {
892 size_t separator = str.find(".");
893 if (separator == std::string::npos)
894 {
895 return false;
896 }
897
898 idOut->testSuiteName = str.substr(0, separator);
899 idOut->testName = str.substr(separator + 1, str.length() - separator - 1);
900 return true;
901 }
902
903 TestResults::TestResults() = default;
904
905 TestResults::~TestResults() = default;
906
907 ProcessInfo::ProcessInfo() = default;
908
909 ProcessInfo &ProcessInfo::operator=(ProcessInfo &&rhs)
910 {
911 process = std::move(rhs.process);
912 testsInBatch = std::move(rhs.testsInBatch);
913 resultsFileName = std::move(rhs.resultsFileName);
914 filterFileName = std::move(rhs.filterFileName);
915 commandLine = std::move(rhs.commandLine);
916 filterString = std::move(rhs.filterString);
917 return *this;
918 }
919
920 ProcessInfo::~ProcessInfo() = default;
921
922 ProcessInfo::ProcessInfo(ProcessInfo &&other)
923 {
924 *this = std::move(other);
925 }
926
927 class TestSuite::TestEventListener : public testing::EmptyTestEventListener
928 {
929 public:
930 // Note: TestResults is owned by the TestSuite. It should outlive TestEventListener.
931 TestEventListener(TestSuite *testSuite) : mTestSuite(testSuite) {}
932
933 void OnTestStart(const testing::TestInfo &testInfo) override
934 {
935 std::lock_guard<std::mutex> guard(mTestSuite->mTestResults.currentTestMutex);
936 mTestSuite->mTestResults.currentTest = GetTestIdentifier(testInfo);
937 mTestSuite->mTestResults.currentTestTimer.start();
938 }
939
940 void OnTestEnd(const testing::TestInfo &testInfo) override
941 {
942 std::lock_guard<std::mutex> guard(mTestSuite->mTestResults.currentTestMutex);
943 mTestSuite->mTestResults.currentTestTimer.stop();
944 const testing::TestResult &resultIn = *testInfo.result();
945 UpdateCurrentTestResult(resultIn, &mTestSuite->mTestResults);
946 mTestSuite->mTestResults.currentTest = TestIdentifier();
947 }
948
949 void OnTestProgramEnd(const testing::UnitTest &testProgramInfo) override
950 {
951 std::lock_guard<std::mutex> guard(mTestSuite->mTestResults.currentTestMutex);
952 mTestSuite->mTestResults.allDone = true;
953 mTestSuite->writeOutputFiles(false);
954 }
955
956 private:
957 TestSuite *mTestSuite;
958 };
959
960 TestSuite::TestSuite(int *argc, char **argv) : TestSuite(argc, argv, []() {}) {}
961
962 TestSuite::TestSuite(int *argc, char **argv, std::function<void()> registerTestsCallback)
963 : mShardCount(-1),
964 mShardIndex(-1),
965 mBotMode(false),
966 mDebugTestGroups(false),
967 mGTestListTests(false),
968 mListTests(false),
969 mPrintTestStdout(false),
970 mDisableCrashHandler(false),
971 mBatchSize(kDefaultBatchSize),
972 mCurrentResultCount(0),
973 mTotalResultCount(0),
974 mMaxProcesses(std::min(NumberOfProcessors(), kDefaultMaxProcesses)),
975 mTestTimeout(kDefaultTestTimeout),
976 mBatchTimeout(kDefaultBatchTimeout),
977 mBatchId(-1),
978 mFlakyRetries(0),
979 mMaxFailures(kDefaultMaxFailures),
980 mFailureCount(0),
981 mModifiedPreferredDevice(false)
982 {
983 ASSERT(mInstance == nullptr);
984 mInstance = this;
985
986 Optional<int> filterArgIndex;
987 bool alsoRunDisabledTests = false;
988
989 #if defined(ANGLE_PLATFORM_MACOS)
990 // By default, we should hook file API functions on macOS to avoid slow Metal shader caching
991 // file access.
992 angle::InitMetalFileAPIHooking(*argc, argv);
993 #endif
994
995 #if defined(ANGLE_PLATFORM_WINDOWS)
996 GTEST_FLAG_SET(catch_exceptions, false);
997 #endif
998
999 if (*argc <= 0)
1000 {
1001 printf("Missing test arguments.\n");
1002 exit(EXIT_FAILURE);
1003 }
1004
1005 mTestExecutableName = argv[0];
1006
1007 for (int argIndex = 1; argIndex < *argc;)
1008 {
1009 if (parseSingleArg(argc, argv, argIndex))
1010 {
1011 continue;
1012 }
1013
1014 if (strstr(argv[argIndex], "--gtest_filter=") == argv[argIndex])
1015 {
1016 filterArgIndex = argIndex;
1017 }
1018 else
1019 {
1020 // Don't include disabled tests in test lists unless the user asks for them.
1021 if (strcmp("--gtest_also_run_disabled_tests", argv[argIndex]) == 0)
1022 {
1023 alsoRunDisabledTests = true;
1024 }
1025
1026 mChildProcessArgs.push_back(argv[argIndex]);
1027 }
1028 ++argIndex;
1029 }
1030
1031 if (mTestArtifactDirectory.empty())
1032 {
1033 mTestArtifactDirectory = GetEnvironmentVar("ISOLATED_OUTDIR");
1034 }
1035
1036 #if defined(ANGLE_PLATFORM_FUCHSIA)
1037 if (mBotMode)
1038 {
1039 printf("Note: Bot mode is not available on Fuchsia. See http://anglebug.com/42265786\n");
1040 mBotMode = false;
1041 }
1042 #endif
1043
1044 if (UsesExternalBatching() && mBotMode)
1045 {
1046 printf("Bot mode is mutually exclusive with external batching.\n");
1047 exit(EXIT_FAILURE);
1048 }
1049
1050 mTestResults.currentTestTimeout = mTestTimeout;
1051
1052 if (!mDisableCrashHandler)
1053 {
1054 // Note that the crash callback must be owned and not use global constructors.
1055 mCrashCallback = [this]() { onCrashOrTimeout(TestResultType::Crash); };
1056 InitCrashHandler(&mCrashCallback);
1057 }
1058
1059 #if defined(ANGLE_PLATFORM_WINDOWS) || defined(ANGLE_PLATFORM_LINUX)
1060 if (IsASan())
1061 {
1062 // Set before `registerTestsCallback()` call
1063 SetEnvironmentVar(kVkLoaderDisableDLLUnloadingEnvVar, "1");
1064 }
1065 #endif
1066
1067 registerTestsCallback();
1068
1069 std::string envShardIndex = angle::GetEnvironmentVar("GTEST_SHARD_INDEX");
1070 if (!envShardIndex.empty())
1071 {
1072 angle::UnsetEnvironmentVar("GTEST_SHARD_INDEX");
1073 if (mShardIndex == -1)
1074 {
1075 std::stringstream shardIndexStream(envShardIndex);
1076 shardIndexStream >> mShardIndex;
1077 }
1078 }
1079
1080 std::string envTotalShards = angle::GetEnvironmentVar("GTEST_TOTAL_SHARDS");
1081 if (!envTotalShards.empty())
1082 {
1083 angle::UnsetEnvironmentVar("GTEST_TOTAL_SHARDS");
1084 if (mShardCount == -1)
1085 {
1086 std::stringstream shardCountStream(envTotalShards);
1087 shardCountStream >> mShardCount;
1088 }
1089 }
1090
1091 // The test harness reads the active GPU from SystemInfo and uses that for test expectations.
1092 // However, some ANGLE backends don't have a concept of an "active" GPU, and instead use power
1093 // preference to select GPU. We can use the environment variable ANGLE_PREFERRED_DEVICE to
1094 // ensure ANGLE's selected GPU matches the GPU expected for this test suite.
1095 const GPUTestConfig testConfig = GPUTestConfig();
1096 const char kPreferredDeviceEnvVar[] = "ANGLE_PREFERRED_DEVICE";
1097 if (GetEnvironmentVar(kPreferredDeviceEnvVar).empty())
1098 {
1099 mModifiedPreferredDevice = true;
1100 const GPUTestConfig::ConditionArray &conditions = testConfig.getConditions();
1101 if (conditions[GPUTestConfig::kConditionAMD])
1102 {
1103 SetEnvironmentVar(kPreferredDeviceEnvVar, "amd");
1104 }
1105 else if (conditions[GPUTestConfig::kConditionNVIDIA])
1106 {
1107 SetEnvironmentVar(kPreferredDeviceEnvVar, "nvidia");
1108 }
1109 else if (conditions[GPUTestConfig::kConditionIntel])
1110 {
1111 SetEnvironmentVar(kPreferredDeviceEnvVar, "intel");
1112 }
1113 else if (conditions[GPUTestConfig::kConditionApple])
1114 {
1115 SetEnvironmentVar(kPreferredDeviceEnvVar, "apple");
1116 }
1117 }
1118
1119 // Special handling for TSAN and UBSAN to force crashes when run in automated testing.
1120 if (IsTSan())
1121 {
1122 std::string tsanOptions = GetEnvironmentVar(kTSanOptionsEnvVar);
1123 tsanOptions += " halt_on_error=1";
1124 SetEnvironmentVar(kTSanOptionsEnvVar, tsanOptions.c_str());
1125 }
1126
1127 if (IsUBSan())
1128 {
1129 std::string ubsanOptions = GetEnvironmentVar(kUBSanOptionsEnvVar);
1130 ubsanOptions += " halt_on_error=1";
1131 SetEnvironmentVar(kUBSanOptionsEnvVar, ubsanOptions.c_str());
1132 }
1133
1134 if ((mShardIndex == -1) != (mShardCount == -1))
1135 {
1136 printf("Shard index and shard count must be specified together.\n");
1137 exit(EXIT_FAILURE);
1138 }
1139
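// Illustrative filter-file contents (test names hypothetical): a single line such as
//   --gtest_filter=SuiteA.Test1:SuiteA.Test2:SuiteB.*
// i.e. exactly the argument that would otherwise be passed on the command line.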
1140 if (!mFilterFile.empty())
1141 {
1142 if (filterArgIndex.valid())
1143 {
1144 printf("Cannot use gtest_filter in conjunction with a filter file.\n");
1145 exit(EXIT_FAILURE);
1146 }
1147
1148 std::string fileContents;
1149 if (!ReadEntireFileToString(mFilterFile.c_str(), &fileContents))
1150 {
1151 printf("Error loading filter file: %s\n", mFilterFile.c_str());
1152 exit(EXIT_FAILURE);
1153 }
1154 mFilterString.assign(fileContents.data());
1155
1156 if (mFilterString.substr(0, strlen("--gtest_filter=")) != std::string("--gtest_filter="))
1157 {
1158 printf("Filter file must start with \"--gtest_filter=\".\n");
1159 exit(EXIT_FAILURE);
1160 }
1161
1162 // Note that we only add a filter string if we previously deleted a filter file
1163 // argument. So we will have space for the new filter string in argv.
1164 AddArg(argc, argv, mFilterString.c_str());
1165 }
1166
1167 // Call into gtest internals to force parameterized test name registration.
1168 testing::internal::UnitTestImpl *impl = testing::internal::GetUnitTestImpl();
1169 impl->RegisterParameterizedTests();
1170
1171 // Initialize internal GoogleTest filter arguments so we can call "FilterMatchesTest".
1172 testing::internal::ParseGoogleTestFlagsOnly(argc, argv);
1173
1174 std::vector<TestIdentifier> testSet = GetFilteredTests(&mTestFileLines, alsoRunDisabledTests);
1175
1176 if (mShardCount == 0)
1177 {
1178 printf("Shard count must be > 0.\n");
1179 exit(EXIT_FAILURE);
1180 }
1181 else if (mShardCount > 0)
1182 {
1183 if (mShardIndex >= mShardCount)
1184 {
1185 printf("Shard index must be less than shard count.\n");
1186 exit(EXIT_FAILURE);
1187 }
1188
1189 // If there's only one shard, we can use the testSet as defined above.
1190 if (mShardCount > 1)
1191 {
1192 if (!mBotMode && !UsesExternalBatching())
1193 {
1194 printf("Sharding is only supported in bot mode or external batching.\n");
1195 exit(EXIT_FAILURE);
1196 }
1197 // With external batching, we must use exactly the testSet as defined externally.
1198 // But when listing tests, we do need to apply sharding ourselves,
1199 // since we use our own implementation for listing tests and not GTest directly.
1200 if (!UsesExternalBatching() || mGTestListTests || mListTests)
1201 {
1202 testSet = GetShardTests(testSet, mShardIndex, mShardCount, &mTestFileLines,
1203 alsoRunDisabledTests);
1204 }
1205 }
1206 }
1207
1208 if (!testSet.empty())
1209 {
1210 std::stringstream fakeTestName;
1211 fakeTestName << kArtifactsFakeTestName << '-' << testSet[0].testName;
1212 mTestResults.testArtifactsFakeTestName = fakeTestName.str();
1213 }
1214
1215 if (mBotMode)
1216 {
1217 // Split up test batches.
1218 mTestQueue = BatchTests(testSet, mBatchSize);
1219
1220 if (mDebugTestGroups)
1221 {
1222 std::cout << "Test Groups:\n";
1223
1224 while (!mTestQueue.empty())
1225 {
1226 const std::vector<TestIdentifier> &tests = mTestQueue.front();
1227 std::cout << GetConfigNameFromTestIdentifier(tests[0]) << " ("
1228 << static_cast<int>(tests.size()) << ")\n";
1229 mTestQueue.pop();
1230 }
1231
1232 exit(EXIT_SUCCESS);
1233 }
1234 }
1235
1236 testing::InitGoogleTest(argc, argv);
1237
1238 mTotalResultCount = testSet.size();
1239
1240 if ((mBotMode || !mResultsDirectory.empty()) && mResultsFile.empty())
1241 {
1242 // Create a default output file in bot mode.
1243 mResultsFile = "output.json";
1244 }
1245
1246 if (!mResultsDirectory.empty())
1247 {
1248 std::stringstream resultFileName;
1249 resultFileName << mResultsDirectory << GetPathSeparator() << mResultsFile;
1250 mResultsFile = resultFileName.str();
1251 }
1252
1253 if (!mTestArtifactDirectory.empty())
1254 {
1255 mMetricWriter.enable(mTestArtifactDirectory);
1256 }
1257
1258 if (!mBotMode)
1259 {
1260 testing::TestEventListeners &listeners = testing::UnitTest::GetInstance()->listeners();
1261 listeners.Append(new TestEventListener(this));
1262
1263 for (const TestIdentifier &id : testSet)
1264 {
1265 mTestResults.results[id].type = TestResultType::NoResult;
1266 }
1267 }
1268 }
1269
1270 TestSuite::~TestSuite()
1271 {
1272 const char kPreferredDeviceEnvVar[] = "ANGLE_PREFERRED_DEVICE";
1273 if (mModifiedPreferredDevice && !angle::GetEnvironmentVar(kPreferredDeviceEnvVar).empty())
1274 {
1275 angle::UnsetEnvironmentVar(kPreferredDeviceEnvVar);
1276 }
1277
1278 if (mWatchdogThread.joinable())
1279 {
1280 mWatchdogThread.detach();
1281 }
1282 TerminateCrashHandler();
1283 }
1284
1285 bool TestSuite::parseSingleArg(int *argc, char **argv, int argIndex)
1286 {
1287 // Note: Flags should be documented in README.md.
1288 return ParseIntArg("--shard-count", argc, argv, argIndex, &mShardCount) ||
1289 ParseIntArg("--shard-index", argc, argv, argIndex, &mShardIndex) ||
1290 ParseIntArg("--batch-size", argc, argv, argIndex, &mBatchSize) ||
1291 ParseIntArg("--max-processes", argc, argv, argIndex, &mMaxProcesses) ||
1292 ParseIntArg(kTestTimeoutArg, argc, argv, argIndex, &mTestTimeout) ||
1293 ParseIntArg("--batch-timeout", argc, argv, argIndex, &mBatchTimeout) ||
1294 ParseIntArg("--flaky-retries", argc, argv, argIndex, &mFlakyRetries) ||
1295 ParseIntArg("--max-failures", argc, argv, argIndex, &mMaxFailures) ||
1296 // Other test functions consume the batch ID, so keep it in the list.
1297 ParseIntArgWithHandling(kBatchId, argc, argv, argIndex, &mBatchId,
1298 ArgHandling::Preserve) ||
1299 ParseStringArg("--results-directory", argc, argv, argIndex, &mResultsDirectory) ||
1300 ParseStringArg(kResultFileArg, argc, argv, argIndex, &mResultsFile) ||
1301 ParseStringArg("--isolated-script-test-output", argc, argv, argIndex, &mResultsFile) ||
1302 ParseStringArg(kFilterFileArg, argc, argv, argIndex, &mFilterFile) ||
1303 ParseStringArg("--histogram-json-file", argc, argv, argIndex, &mHistogramJsonFile) ||
1304 // We need these overloads to work around technical debt in the Android test runner.
1305 ParseStringArg("--isolated-script-test-perf-output", argc, argv, argIndex,
1306 &mHistogramJsonFile) ||
1307 ParseStringArg("--isolated_script_test_perf_output", argc, argv, argIndex,
1308 &mHistogramJsonFile) ||
1309 ParseStringArg("--render-test-output-dir", argc, argv, argIndex,
1310 &mTestArtifactDirectory) ||
1311 ParseStringArg("--isolated-outdir", argc, argv, argIndex, &mTestArtifactDirectory) ||
1312 ParseFlag("--test-launcher-bot-mode", argc, argv, argIndex, &mBotMode) ||
1313 ParseFlag("--bot-mode", argc, argv, argIndex, &mBotMode) ||
1314 ParseFlag("--debug-test-groups", argc, argv, argIndex, &mDebugTestGroups) ||
1315 ParseFlag("--gtest_list_tests", argc, argv, argIndex, &mGTestListTests) ||
1316 ParseFlag("--list-tests", argc, argv, argIndex, &mListTests) ||
1317 ParseFlag("--print-test-stdout", argc, argv, argIndex, &mPrintTestStdout) ||
1318 ParseFlag(kDisableCrashHandler, argc, argv, argIndex, &mDisableCrashHandler);
1319 }
1320
1321 void TestSuite::onCrashOrTimeout(TestResultType crashOrTimeout)
1322 {
1323 std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
1324 if (mTestResults.currentTest.valid())
1325 {
1326 TestResult &result = mTestResults.results[mTestResults.currentTest];
1327 result.type = crashOrTimeout;
1328 result.elapsedTimeSeconds.back() = mTestResults.currentTestTimer.getElapsedWallClockTime();
1329 }
1330
1331 if (mResultsFile.empty())
1332 {
1333 printf("No results file specified.\n");
1334 return;
1335 }
1336
1337 writeOutputFiles(true);
1338 }
1339
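// Spawns one child process to run a batch of tests. Illustrative command line (paths
// and values hypothetical):
//   angle_tests --filter-file=/tmp/fltXXXX --results-file=/tmp/resYYYY --batch-id=3 \
//       [--test-timeout=N] [--isolated-outdir=DIR] [--disable-crash-handler] <other args>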
1340 bool TestSuite::launchChildTestProcess(uint32_t batchId,
1341 const std::vector<TestIdentifier> &testsInBatch)
1342 {
1343 // Create a temporary file to store the test list
1344 ProcessInfo processInfo;
1345
1346 Optional<std::string> filterBuffer = CreateTemporaryFile();
1347 if (!filterBuffer.valid())
1348 {
1349 std::cerr << "Error creating temporary file for test list.\n";
1350 return false;
1351 }
1352 processInfo.filterFileName.assign(filterBuffer.value());
1353
1354 std::string filterString = GetTestFilter(testsInBatch);
1355
1356 FILE *fp = fopen(processInfo.filterFileName.c_str(), "w");
1357 if (!fp)
1358 {
1359 std::cerr << "Error opening temporary file for test list.\n";
1360 return false;
1361 }
1362 fprintf(fp, "%s", filterString.c_str());
1363 fclose(fp);
1364
1365 processInfo.filterString = filterString;
1366
1367 std::string filterFileArg = kFilterFileArg + std::string("=") + processInfo.filterFileName;
1368
1369 // Create a temporary file to store the test output.
1370 Optional<std::string> resultsBuffer = CreateTemporaryFile();
1371 if (!resultsBuffer.valid())
1372 {
1373 std::cerr << "Error creating temporary file for test list.\n";
1374 return false;
1375 }
1376 processInfo.resultsFileName.assign(resultsBuffer.value());
1377
1378 std::string resultsFileArg = kResultFileArg + std::string("=") + processInfo.resultsFileName;
1379
1380 // Construct command line for child process.
1381 std::vector<const char *> args;
1382
1383 args.push_back(mTestExecutableName.c_str());
1384 args.push_back(filterFileArg.c_str());
1385 args.push_back(resultsFileArg.c_str());
1386
1387 std::stringstream batchIdStream;
1388 batchIdStream << kBatchId << "=" << batchId;
1389 std::string batchIdString = batchIdStream.str();
1390 args.push_back(batchIdString.c_str());
1391
1392 for (const std::string &arg : mChildProcessArgs)
1393 {
1394 args.push_back(arg.c_str());
1395 }
1396
1397 if (mDisableCrashHandler)
1398 {
1399 args.push_back(kDisableCrashHandler);
1400 }
1401
1402 std::string timeoutStr;
1403 if (mTestTimeout != kDefaultTestTimeout)
1404 {
1405 std::stringstream timeoutStream;
1406 timeoutStream << kTestTimeoutArg << "=" << mTestTimeout;
1407 timeoutStr = timeoutStream.str();
1408 args.push_back(timeoutStr.c_str());
1409 }
1410
1411 std::string artifactsDir;
1412 if (!mTestArtifactDirectory.empty())
1413 {
1414 std::stringstream artifactsDirStream;
1415 artifactsDirStream << kIsolatedOutDir << "=" << mTestArtifactDirectory;
1416 artifactsDir = artifactsDirStream.str();
1417 args.push_back(artifactsDir.c_str());
1418 }
1419
1420 // Launch child process and wait for completion.
1421 processInfo.process = LaunchProcess(args, ProcessOutputCapture::StdoutAndStderrInterleaved);
1422
1423 if (!processInfo.process->started())
1424 {
1425 std::cerr << "Error launching child process.\n";
1426 return false;
1427 }
1428
1429 std::stringstream commandLineStr;
1430 for (const char *arg : args)
1431 {
1432 commandLineStr << arg << " ";
1433 }
1434
1435 processInfo.commandLine = commandLineStr.str();
1436 processInfo.testsInBatch = testsInBatch;
1437 mCurrentProcesses.emplace_back(std::move(processInfo));
1438 return true;
1439 }
1440
1441 void ParseTestIdentifierAndSetResult(const std::string &testName,
1442 TestResultType result,
1443 TestResults *results)
1444 {
1445 // Trim off any whitespace + extra stuff at the end of the string.
1446 std::string modifiedTestName = testName.substr(0, testName.find(' '));
1447 modifiedTestName = modifiedTestName.substr(0, modifiedTestName.find('\r'));
1448 TestIdentifier id;
1449 bool ok = TestIdentifier::ParseFromString(modifiedTestName, &id);
1450 ASSERT(ok);
1451 results->results[id] = {result};
1452 }
1453
1454 bool TestSuite::finishProcess(ProcessInfo *processInfo)
1455 {
1456 // Get test results and merge into main list.
1457 TestResults batchResults;
1458
1459 if (!GetTestResultsFromFile(processInfo->resultsFileName.c_str(), &batchResults))
1460 {
1461 std::cerr << "Warning: could not find test results file from child process.\n";
1462
1463 // First assume all tests get skipped.
1464 for (const TestIdentifier &id : processInfo->testsInBatch)
1465 {
1466 batchResults.results[id] = {TestResultType::NoResult};
1467 }
1468
1469 // Attempt to reconstruct passing list from stdout snippets.
1470 const std::string &batchStdout = processInfo->process->getStdout();
1471 std::istringstream linesStream(batchStdout);
1472
1473 std::string line;
1474 while (std::getline(linesStream, line))
1475 {
1476 size_t startPos = line.find(kStartedTestString);
1477 size_t failPos = line.find(kFailedTestString);
1478 size_t passPos = line.find(kPassedTestString);
1479 size_t skippedPos = line.find(kSkippedTestString);
1480
1481 if (startPos != std::string::npos)
1482 {
1483 // Assume a test that's started crashed until we see it completed.
1484 std::string testName = line.substr(strlen(kStartedTestString));
1485 ParseTestIdentifierAndSetResult(testName, TestResultType::Crash, &batchResults);
1486 }
1487 else if (failPos != std::string::npos)
1488 {
1489 std::string testName = line.substr(strlen(kFailedTestString));
1490 ParseTestIdentifierAndSetResult(testName, TestResultType::Fail, &batchResults);
1491 }
1492 else if (passPos != std::string::npos)
1493 {
1494 std::string testName = line.substr(strlen(kPassedTestString));
1495 ParseTestIdentifierAndSetResult(testName, TestResultType::Pass, &batchResults);
1496 }
1497 else if (skippedPos != std::string::npos)
1498 {
1499 std::string testName = line.substr(strlen(kSkippedTestString));
1500 ParseTestIdentifierAndSetResult(testName, TestResultType::Skip, &batchResults);
1501 }
1502 }
1503 }
1504
1505 if (!MergeTestResults(&batchResults, &mTestResults, mFlakyRetries))
1506 {
1507 std::cerr << "Error merging batch test results.\n";
1508 return false;
1509 }
1510
1511 if (!batchResults.results.empty())
1512 {
1513 const TestIdentifier &id = batchResults.results.begin()->first;
1514 std::string config = GetConfigNameFromTestIdentifier(id);
1515 printf("Completed batch with config: %s\n", config.c_str());
1516
1517 for (const auto &resultIter : batchResults.results)
1518 {
1519 const TestResult &result = resultIter.second;
1520 if (result.type != TestResultType::NoResult && IsFailedResult(result.type))
1521 {
1522 printf("To reproduce the batch, use filter:\n%s\n",
1523 processInfo->filterString.c_str());
1524 break;
1525 }
1526 }
1527 }
1528
1529 // Process results and print unexpected errors.
1530 for (const auto &resultIter : batchResults.results)
1531 {
1532 const TestIdentifier &id = resultIter.first;
1533 const TestResult &result = resultIter.second;
1534
1535 // NoResult entries aren't processed here since they're added back to the test queue below.
1536 if (result.type == TestResultType::NoResult)
1537 {
1538 continue;
1539 }
1540
1541 mCurrentResultCount++;
1542
1543 printf("[%d/%d] %s.%s", mCurrentResultCount, mTotalResultCount, id.testSuiteName.c_str(),
1544 id.testName.c_str());
1545
1546 if (mPrintTestStdout)
1547 {
1548 const std::string &batchStdout = processInfo->process->getStdout();
1549 PrintTestOutputSnippet(id, result, batchStdout);
1550 }
1551 else if (result.type == TestResultType::Pass)
1552 {
1553 printf(" (%0.1lf ms)\n", result.elapsedTimeSeconds.back() * 1000.0);
1554 }
1555 else if (result.type == TestResultType::Skip)
1556 {
1557 printf(" (skipped)\n");
1558 }
1559 else if (result.type == TestResultType::Timeout)
1560 {
1561 printf(" (TIMEOUT in %0.1lf s)\n", result.elapsedTimeSeconds.back());
1562 mFailureCount++;
1563 }
1564 else
1565 {
1566 printf(" (%s)\n", ResultTypeToString(result.type));
1567 mFailureCount++;
1568
1569 const std::string &batchStdout = processInfo->process->getStdout();
1570 PrintTestOutputSnippet(id, result, batchStdout);
1571 }
1572 }
1573
1574 // On unexpected exit, re-queue any unfinished tests.
1575 std::vector<TestIdentifier> unfinishedTests;
1576 for (const auto &resultIter : batchResults.results)
1577 {
1578 const TestIdentifier &id = resultIter.first;
1579 const TestResult &result = resultIter.second;
1580
1581 if (result.type == TestResultType::NoResult)
1582 {
1583 unfinishedTests.push_back(id);
1584 }
1585 }
1586
1587 if (!unfinishedTests.empty())
1588 {
1589 mTestQueue.emplace(std::move(unfinishedTests));
1590 }
1591
1592 // Clean up any dirty temporary files.
1593 for (const std::string &tempFile : {processInfo->filterFileName, processInfo->resultsFileName})
1594 {
1595 // Note: we should be aware that this cleanup won't happen if the harness itself
1596 // crashes. If this situation comes up in the future we should add crash cleanup to the
1597 // harness.
1598 if (!angle::DeleteSystemFile(tempFile.c_str()))
1599 {
1600 std::cerr << "Warning: Error cleaning up temp file: " << tempFile << "\n";
1601 }
1602 }
1603
1604 processInfo->process.reset();
1605 return true;
1606 }
1607
1608 int TestSuite::run()
1609 {
1610 #if defined(ANGLE_PLATFORM_ANDROID)
1611 if (mListTests && mGTestListTests)
1612 {
1613 // Workaround for the Android test runner requiring a GTest test list.
1614 printf("PlaceholderTest.\n Placeholder\n");
1615 return EXIT_SUCCESS;
1616 }
1617 #endif // defined(ANGLE_PLATFORM_ANDROID)
1618
1619 if (mListTests)
1620 {
1621 ListTests(mTestResults.results);
1622
1623 #if defined(ANGLE_PLATFORM_ANDROID)
1624 // Because of quirks with the Chromium-provided Android test runner, we need to use a few
1625 // tricks to get the test list output. We add placeholder output for a single test to trick
1626 // the test runner into thinking it ran the tests successfully. We also add an end marker
1627 // for the tests list so we can parse the list from the more spammy Android stdout log.
1628 static constexpr char kPlaceholderTestTest[] = R"(
1629 [==========] Running 1 test from 1 test suite.
1630 [----------] Global test environment set-up.
1631 [----------] 1 test from PlaceholderTest
1632 [ RUN ] PlaceholderTest.Placeholder
1633 [ OK ] PlaceholderTest.Placeholder (0 ms)
1634 [----------] 1 test from APITest (0 ms total)
1635
1636 [----------] Global test environment tear-down
1637 [==========] 1 test from 1 test suite ran. (24 ms total)
1638 [ PASSED ] 1 test.
1639 )";
1640 printf(kPlaceholderTestTest);
1641 #endif // defined(ANGLE_PLATFORM_ANDROID)
1642
1643 return EXIT_SUCCESS;
1644 }
1645
1646 if (mGTestListTests)
1647 {
1648 GTestListTests(mTestResults.results);
1649 return EXIT_SUCCESS;
1650 }
1651
1652 // Run tests serially.
1653 if (!mBotMode)
1654 {
1655 // Only start the watchdog if the debugger is not attached and we're a child process.
1656 if (!angle::IsDebuggerAttached() && mBatchId != -1)
1657 {
1658 startWatchdog();
1659 }
1660
1661 int retVal = RUN_ALL_TESTS();
1662 {
1663 std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
1664 mTestResults.allDone = true;
1665 }
1666
1667 if (mWatchdogThread.joinable())
1668 {
1669 mWatchdogThread.join();
1670 }
1671 return retVal;
1672 }
1673
1674 Timer totalRunTime;
1675 totalRunTime.start();
1676
1677 Timer messageTimer;
1678 messageTimer.start();
1679
1680 uint32_t batchId = 0;
1681
1682 while (!mTestQueue.empty() || !mCurrentProcesses.empty())
1683 {
1684 bool progress = false;
1685
1686 // Spawn a process if needed and possible.
1687 if (static_cast<int>(mCurrentProcesses.size()) < mMaxProcesses && !mTestQueue.empty())
1688 {
1689 std::vector<TestIdentifier> testsInBatch = mTestQueue.front();
1690 mTestQueue.pop();
1691
1692 if (!launchChildTestProcess(++batchId, testsInBatch))
1693 {
1694 return 1;
1695 }
1696
1697 progress = true;
1698 }
1699
1700 // Check for process completion.
1701 uint32_t totalTestCount = 0;
1702 for (auto processIter = mCurrentProcesses.begin(); processIter != mCurrentProcesses.end();)
1703 {
1704 ProcessInfo &processInfo = *processIter;
1705 if (processInfo.process->finished())
1706 {
1707 if (!finishProcess(&processInfo))
1708 {
1709 return 1;
1710 }
1711 processIter = mCurrentProcesses.erase(processIter);
1712 progress = true;
1713 }
1714 else if (processInfo.process->getElapsedTimeSeconds() > mBatchTimeout)
1715 {
1716 // Terminate the process and record timeouts for the batch.
1717 // Because we can't determine which sub-test caused a timeout, record the whole
1718 // batch as a timeout failure. Can be improved by using socket message passing.
1719 if (!processInfo.process->kill())
1720 {
1721 return 1;
1722 }
1723
1724 const std::string &batchStdout = processInfo.process->getStdout();
1725 std::vector<std::string> lines =
1726 SplitString(batchStdout, "\r\n", WhitespaceHandling::TRIM_WHITESPACE,
1727 SplitResult::SPLIT_WANT_NONEMPTY);
1728 constexpr size_t kKeepLines = 10;
1729 printf("\nBatch timeout! Last %zu lines of batch stdout:\n", kKeepLines);
1730 printf("---------------------------------------------\n");
1731 for (size_t lineNo = lines.size() - std::min(lines.size(), kKeepLines);
1732 lineNo < lines.size(); ++lineNo)
1733 {
1734 printf("%s\n", lines[lineNo].c_str());
1735 }
1736 printf("---------------------------------------------\n\n");
1737
1738 for (const TestIdentifier &testIdentifier : processInfo.testsInBatch)
1739 {
1740 // Because the whole batch failed we can't know how long each test took.
1741 mTestResults.results[testIdentifier].type = TestResultType::Timeout;
1742 mFailureCount++;
1743 }
1744
1745 processIter = mCurrentProcesses.erase(processIter);
1746 progress = true;
1747 }
1748 else
1749 {
1750 totalTestCount += static_cast<uint32_t>(processInfo.testsInBatch.size());
1751 processIter++;
1752 }
1753 }
1754
1755 if (progress)
1756 {
1757 messageTimer.start();
1758 }
1759 else if (messageTimer.getElapsedWallClockTime() > kIdleMessageTimeout)
1760 {
1761 const ProcessInfo &processInfo = mCurrentProcesses[0];
1762 double processTime = processInfo.process->getElapsedTimeSeconds();
1763 printf("Running %d tests in %d processes, longest for %d seconds.\n", totalTestCount,
1764 static_cast<int>(mCurrentProcesses.size()), static_cast<int>(processTime));
1765 messageTimer.start();
1766 }
1767
1768         // Stop scheduling new batches once we exceed the maximum failure threshold, but still
1769         // wait for in-flight tests to finish.
1769 if (mFailureCount > mMaxFailures && !mTestQueue.empty())
1770 {
1771 printf("Reached maximum failure count (%d), clearing test queue.\n", mMaxFailures);
1772 TestQueue emptyTestQueue;
1773 std::swap(mTestQueue, emptyTestQueue);
1774 }
1775
1776 // Sleep briefly and continue.
1777 angle::Sleep(100);
1778 }
1779
1780 // Dump combined results.
1781 if (mFailureCount > mMaxFailures)
1782 {
1783 printf(
1784 "Omitted results files because the failure count (%d) exceeded the maximum number of "
1785 "failures (%d).\n",
1786 mFailureCount, mMaxFailures);
1787 }
1788 else
1789 {
1790 writeOutputFiles(false);
1791 }
1792
1793 totalRunTime.stop();
1794 printf("Tests completed in %lf seconds\n", totalRunTime.getElapsedWallClockTime());
1795
1796 return printFailuresAndReturnCount() == 0 ? 0 : 1;
1797 }
1798
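// Prints every test that neither passed nor was skipped, along with its source location and
// result type, and returns the number of such failures (0 when everything passed).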
1799 int TestSuite::printFailuresAndReturnCount() const
1800 {
1801 std::vector<std::string> failures;
1802 uint32_t skipCount = 0;
1803
1804 for (const auto &resultIter : mTestResults.results)
1805 {
1806 const TestIdentifier &id = resultIter.first;
1807 const TestResult &result = resultIter.second;
1808
1809 if (result.type == TestResultType::Skip)
1810 {
1811 skipCount++;
1812 }
1813 else if (result.type != TestResultType::Pass)
1814 {
1815 const FileLine &fileLine = mTestFileLines.find(id)->second;
1816
1817 std::stringstream failureMessage;
1818 failureMessage << id << " (" << fileLine.file << ":" << fileLine.line << ") ("
1819 << ResultTypeToString(result.type) << ")";
1820 failures.emplace_back(failureMessage.str());
1821 }
1822 }
1823
1824 if (failures.empty())
1825 return 0;
1826
1827 printf("%zu test%s failed:\n", failures.size(), failures.size() > 1 ? "s" : "");
1828 for (const std::string &failure : failures)
1829 {
1830 printf(" %s\n", failure.c_str());
1831 }
1832 if (skipCount > 0)
1833 {
1834 printf("%u tests skipped.\n", skipCount);
1835 }
1836
1837 return static_cast<int>(failures.size());
1838 }
1839
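// Spawns a watchdog thread that polls the current test's timer roughly twice a second. If a test
// exceeds its timeout before the run is marked done, the watchdog records a Timeout result via
// onCrashOrTimeout() and force-exits the process.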
1840 void TestSuite::startWatchdog()
1841 {
1842 auto watchdogMain = [this]() {
1843 do
1844 {
1845 {
1846 std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
1847 if (mTestResults.currentTestTimer.getElapsedWallClockTime() >
1848 mTestResults.currentTestTimeout)
1849 {
1850 break;
1851 }
1852
1853 if (mTestResults.allDone)
1854 return;
1855 }
1856
1857 angle::Sleep(500);
1858 } while (true);
1859 onCrashOrTimeout(TestResultType::Timeout);
1860 ::_Exit(EXIT_FAILURE);
1861 };
1862 mWatchdogThread = std::thread(watchdogMain);
1863 }
1864
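// Forwards a single performance sample to the histogram writer so it is included in the
// histogram JSON output. A minimal usage sketch (the `suite` pointer and values are
// illustrative only):
//   suite->addHistogramSample("wall_time", "MyBenchmark.run", 1.25, "ms");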
1865 void TestSuite::addHistogramSample(const std::string &measurement,
1866 const std::string &story,
1867 double value,
1868 const std::string &units)
1869 {
1870 mHistogramWriter.addSample(measurement, story, value, units);
1871 }
1872
1873 bool TestSuite::hasTestArtifactsDirectory() const
1874 {
1875 return !mTestArtifactDirectory.empty();
1876 }
1877
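// Registers an artifact name in the test results so it is reported with them, and returns the
// path to write the artifact to: inside mTestArtifactDirectory when one is configured, otherwise
// just the bare artifact name.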
1878 std::string TestSuite::reserveTestArtifactPath(const std::string &artifactName)
1879 {
1880 mTestResults.testArtifactPaths.push_back(artifactName);
1881
1882 if (mTestArtifactDirectory.empty())
1883 {
1884 return artifactName;
1885 }
1886
1887 std::stringstream pathStream;
1888 pathStream << mTestArtifactDirectory << GetPathSeparator() << artifactName;
1889 return pathStream.str();
1890 }
1891
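// Reads a results JSON file (as written by a child process) and deserializes it into a
// TestResults struct via rapidjson. Returns false if the file cannot be opened or parsed.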
1892 bool GetTestResultsFromFile(const char *fileName, TestResults *resultsOut)
1893 {
1894 std::ifstream ifs(fileName);
1895 if (!ifs.is_open())
1896 {
1897 std::cerr << "Error opening " << fileName << "\n";
1898 return false;
1899 }
1900
1901 js::IStreamWrapper ifsWrapper(ifs);
1902 js::Document document;
1903 document.ParseStream(ifsWrapper);
1904
1905 if (document.HasParseError())
1906 {
1907 std::cerr << "Parse error reading JSON document: " << document.GetParseError() << "\n";
1908 return false;
1909 }
1910
1911 if (!GetTestResultsFromJSON(document, resultsOut))
1912 {
1913 std::cerr << "Error getting test results from JSON.\n";
1914 return false;
1915 }
1916
1917 return true;
1918 }
1919
1920 void TestSuite::dumpTestExpectationsErrorMessages()
1921 {
1922 std::stringstream errorMsgStream;
1923 for (const auto &message : mTestExpectationsParser.getErrorMessages())
1924 {
1925 errorMsgStream << std::endl << " " << message;
1926 }
1927
1928 std::cerr << "Failed to load test expectations." << errorMsgStream.str() << std::endl;
1929 }
1930
1931 bool TestSuite::loadTestExpectationsFromFileWithConfig(const GPUTestConfig &config,
1932 const std::string &fileName)
1933 {
1934 if (!mTestExpectationsParser.loadTestExpectationsFromFile(config, fileName))
1935 {
1936 dumpTestExpectationsErrorMessages();
1937 return false;
1938 }
1939 return true;
1940 }
1941
1942 bool TestSuite::loadAllTestExpectationsFromFile(const std::string &fileName)
1943 {
1944 if (!mTestExpectationsParser.loadAllTestExpectationsFromFile(fileName))
1945 {
1946 dumpTestExpectationsErrorMessages();
1947 return false;
1948 }
1949 return true;
1950 }
1951
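// Logs every loaded expectation that never matched a test in this run. Returns true when at
// least one unused expectation was found.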
1952 bool TestSuite::logAnyUnusedTestExpectations()
1953 {
1954 std::stringstream unusedMsgStream;
1955 bool anyUnused = false;
1956 for (const auto &message : mTestExpectationsParser.getUnusedExpectationsMessages())
1957 {
1958 anyUnused = true;
1959 unusedMsgStream << std::endl << " " << message;
1960 }
1961 if (anyUnused)
1962 {
1963 std::cerr << "Found unused test expectations:" << unusedMsgStream.str() << std::endl;
1964 return true;
1965 }
1966 return false;
1967 }
1968
1969 int32_t TestSuite::getTestExpectation(const std::string &testName)
1970 {
1971 return mTestExpectationsParser.getTestExpectation(testName);
1972 }
1973
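// Tests whose expectation is kGpuTestTimeout (i.e. expected to be slow) get the scaled slow-test
// timeout; all other tests keep the regular per-test timeout. The value is published under the
// results mutex so the watchdog thread observes it safely.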
1974 void TestSuite::maybeUpdateTestTimeout(uint32_t testExpectation)
1975 {
1976 double testTimeout = (testExpectation == GPUTestExpectationsParser::kGpuTestTimeout)
1977 ? getSlowTestTimeout()
1978 : mTestTimeout;
1979 std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
1980 mTestResults.currentTestTimeout = testTimeout;
1981 }
1982
1983 int32_t TestSuite::getTestExpectationWithConfigAndUpdateTimeout(const GPUTestConfig &config,
1984 const std::string &testName)
1985 {
1986 uint32_t expectation = mTestExpectationsParser.getTestExpectationWithConfig(config, testName);
1987 maybeUpdateTestTimeout(expectation);
1988 return expectation;
1989 }
1990
1991 int TestSuite::getSlowTestTimeout() const
1992 {
1993 return mTestTimeout * kSlowTestTimeoutScale;
1994 }
1995
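// Writes whichever output files were requested: the JSON results file, the histogram JSON file,
// and any pending metric data. The `interrupted` flag is forwarded to WriteResultsFile() so the
// results indicate whether the run completed normally.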
1996 void TestSuite::writeOutputFiles(bool interrupted)
1997 {
1998 if (!mResultsFile.empty())
1999 {
2000 WriteResultsFile(interrupted, mTestResults, mResultsFile);
2001 }
2002
2003 if (!mHistogramJsonFile.empty())
2004 {
2005 WriteHistogramJson(mHistogramWriter, mHistogramJsonFile);
2006 }
2007
2008 mMetricWriter.close();
2009 }
2010
2011 const char *TestResultTypeToString(TestResultType type)
2012 {
2013 switch (type)
2014 {
2015 case TestResultType::Crash:
2016 return "Crash";
2017 case TestResultType::Fail:
2018 return "Fail";
2019 case TestResultType::NoResult:
2020 return "NoResult";
2021 case TestResultType::Pass:
2022 return "Pass";
2023 case TestResultType::Skip:
2024 return "Skip";
2025 case TestResultType::Timeout:
2026 return "Timeout";
2027 case TestResultType::Unknown:
2028 default:
2029 return "Unknown";
2030 }
2031 }
2032
2033 // This code supports using "-" in test names, which happens often in dEQP. GTest uses "-" as the
2034 // marker for the beginning of the exclusion filter. Work around this by replacing "-" with "?",
2035 // which matches any single character.
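// For example (illustrative): a filter entry of "dEQP-GLES2.info.version" becomes
// "dEQP?GLES2.info.version" before being handed to GTest.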
2036 std::string ReplaceDashesWithQuestionMark(std::string dashesString)
2037 {
2038 std::string noDashesString = dashesString;
2039 ReplaceAllSubstrings(&noDashesString, "-", "?");
2040 return noDashesString;
2041 }
2042 } // namespace angle
2043