// Copyright 2019 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/test/launcher/test_launcher.h"

#include <stddef.h>

#include <optional>
#include <unordered_set>

#include "base/base64.h"
#include "base/command_line.h"
#include "base/containers/contains.h"
#include "base/files/file_util.h"
#include "base/files/scoped_temp_dir.h"
#include "base/functional/bind.h"
#include "base/functional/callback_helpers.h"
#include "base/i18n/time_formatting.h"
#include "base/immediate_crash.h"
#include "base/logging.h"
#include "base/no_destructor.h"
#include "base/process/launch.h"
#include "base/strings/strcat.h"
#include "base/strings/string_split.h"
#include "base/strings/string_util.h"
#include "base/test/gtest_xml_util.h"
#include "base/test/launcher/test_launcher_test_utils.h"
#include "base/test/launcher/unit_test_launcher.h"
#include "base/test/multiprocess_test.h"
#include "base/test/scoped_logging_settings.h"
#include "base/test/task_environment.h"
#include "base/test/test_timeouts.h"
#include "base/threading/platform_thread.h"
#include "build/build_config.h"
#include "build/chromeos_buildflags.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "testing/multiprocess_func_list.h"

namespace base {
namespace {

using ::testing::_;
using ::testing::DoAll;
using ::testing::Invoke;
using ::testing::InvokeWithoutArgs;
using ::testing::Return;
using ::testing::ReturnPointee;

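// Builds a TestResult with the given full name, status, elapsed time, and
// output snippet.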
TestResult GenerateTestResult(const std::string& test_name,
                              TestResult::Status status,
                              TimeDelta elapsed_td = Milliseconds(30),
                              const std::string& output_snippet = "output") {
  TestResult result;
  result.full_name = test_name;
  result.status = status;
  result.elapsed_time = elapsed_td;
  result.output_snippet = output_snippet;
  return result;
}

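// Builds a TestResultPart populated with the given type, location, summary,
// and message.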
TestResultPart GenerateTestResultPart(TestResultPart::Type type,
                                      const std::string& file_name,
                                      int line_number,
                                      const std::string& summary,
                                      const std::string& message) {
  TestResultPart test_result_part;
  test_result_part.type = type;
  test_result_part.file_name = file_name;
  test_result_part.line_number = line_number;
  test_result_part.summary = summary;
  test_result_part.message = message;
  return test_result_part;
}

// Mock TestLauncher that stubs out CreateAndStartThreadPool; the unit test
// provides a TaskEnvironment instead.
class MockTestLauncher : public TestLauncher {
 public:
  MockTestLauncher(TestLauncherDelegate* launcher_delegate,
                   size_t parallel_jobs)
      : TestLauncher(launcher_delegate, parallel_jobs) {}

  void CreateAndStartThreadPool(size_t parallel_jobs) override {}

  MOCK_METHOD4(LaunchChildGTestProcess,
               void(scoped_refptr<TaskRunner> task_runner,
                    const std::vector<std::string>& test_names,
                    const FilePath& task_temp_dir,
                    const FilePath& child_temp_dir));
};

// Simple TestLauncherDelegate mock to test TestLauncher flow.
class MockTestLauncherDelegate : public TestLauncherDelegate {
 public:
  MOCK_METHOD1(GetTests, bool(std::vector<TestIdentifier>* output));
  MOCK_METHOD2(WillRunTest,
               bool(const std::string& test_case_name,
                    const std::string& test_name));
  MOCK_METHOD2(ProcessTestResults,
               void(std::vector<TestResult>& test_results,
                    TimeDelta elapsed_time));
  MOCK_METHOD3(GetCommandLine,
               CommandLine(const std::vector<std::string>& test_names,
                           const FilePath& temp_dir_,
                           FilePath* output_file_));
  MOCK_METHOD1(IsPreTask, bool(const std::vector<std::string>& test_names));
  MOCK_METHOD0(GetWrapper, std::string());
  MOCK_METHOD0(GetLaunchOptions, int());
  MOCK_METHOD0(GetTimeout, TimeDelta());
  MOCK_METHOD0(GetBatchSize, size_t());
};

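// ResultWatcher whose blocking wait is mocked so tests can drive the passage
// of time via the mock clock.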
class MockResultWatcher : public ResultWatcher {
 public:
  MockResultWatcher(FilePath result_file, size_t num_tests)
      : ResultWatcher(result_file, num_tests) {}

  MOCK_METHOD(bool, WaitWithTimeout, (TimeDelta), (override));
};

// Uses MockTestLauncher to test TestLauncher filters and command-line
// switch setup.
class TestLauncherTest : public testing::Test {
 protected:
  TestLauncherTest()
      : command_line(new CommandLine(CommandLine::NO_PROGRAM)),
        test_launcher(&delegate, 10) {}

  // Adds tests to be returned by the delegate.
  void AddMockedTests(std::string test_case_name,
                      const std::vector<std::string>& test_names) {
    for (const std::string& test_name : test_names) {
      TestIdentifier test_data;
      test_data.test_case_name = test_case_name;
      test_data.test_name = test_name;
      test_data.file = "File";
      test_data.line = 100;
      tests_.push_back(test_data);
    }
  }

  // Set up expected delegate calls, and which tests the delegate will return.
  void SetUpExpectCalls(size_t batch_size = 10) {
    EXPECT_CALL(delegate, GetTests(_))
        .WillOnce(::testing::DoAll(testing::SetArgPointee<0>(tests_),
                                   testing::Return(true)));
    EXPECT_CALL(delegate, WillRunTest(_, _))
        .WillRepeatedly(testing::Return(true));
    EXPECT_CALL(delegate, ProcessTestResults(_, _)).Times(0);
    EXPECT_CALL(delegate, GetCommandLine(_, _, _))
        .WillRepeatedly(testing::Return(CommandLine(CommandLine::NO_PROGRAM)));
    EXPECT_CALL(delegate, GetWrapper())
        .WillRepeatedly(testing::Return(std::string()));
    EXPECT_CALL(delegate, IsPreTask(_)).WillRepeatedly(testing::Return(true));
    EXPECT_CALL(delegate, GetLaunchOptions())
        .WillRepeatedly(testing::Return(true));
    EXPECT_CALL(delegate, GetTimeout())
        .WillRepeatedly(testing::Return(TimeDelta()));
    EXPECT_CALL(delegate, GetBatchSize())
        .WillRepeatedly(testing::Return(batch_size));
  }

  std::unique_ptr<CommandLine> command_line;
  MockTestLauncher test_launcher;
  MockTestLauncherDelegate delegate;
  base::test::TaskEnvironment task_environment{
      base::test::TaskEnvironment::MainThreadType::IO};
  ScopedTempDir dir;

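  // Writes a filter file that excludes Test.firstTest and returns its path.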
  FilePath CreateFilterFile() {
    FilePath result_file = dir.GetPath().AppendASCII("test.filter");
    WriteFile(result_file, "-Test.firstTest");
    return result_file;
  }

 private:
  std::vector<TestIdentifier> tests_;
};

class ResultWatcherTest : public testing::Test {
 protected:
  ResultWatcherTest() = default;

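  // Writes the header of a result XML file, intentionally leaving it
  // unterminated the way a still-running test batch would.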
  FilePath CreateResultFile() {
    FilePath result_file = dir.GetPath().AppendASCII("test_results.xml");
    WriteFile(result_file,
              "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
              "<testsuites>\n"
              " <testsuite>\n");
    return result_file;
  }

  base::test::TaskEnvironment task_environment{
      base::test::TaskEnvironment::TimeSource::MOCK_TIME};
  ScopedTempDir dir;
};

// Action to mock the delegate invoking OnTestFinished on the test launcher.
ACTION_P3(OnTestResult, launcher, full_name, status) {
  TestResult result = GenerateTestResult(full_name, status);
  arg0->PostTask(FROM_HERE, BindOnce(&TestLauncher::OnTestFinished,
                                     Unretained(launcher), result));
}

// Same action, but taking a fully populated TestResult.
ACTION_P2(OnTestResult, launcher, result) {
  arg0->PostTask(FROM_HERE, BindOnce(&TestLauncher::OnTestFinished,
                                     Unretained(launcher), result));
}

// A test and a disabled test cannot share a name.
TEST_F(TestLauncherTest, TestNameSharedWithDisabledTest) {
  AddMockedTests("Test", {"firstTest", "DISABLED_firstTest"});
  SetUpExpectCalls();
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// A test case and a disabled test case cannot share a name.
TEST_F(TestLauncherTest, TestNameSharedWithDisabledTestCase) {
  AddMockedTests("DISABLED_Test", {"firstTest"});
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// Compiled tests should not contain an orphaned PRE_ test.
TEST_F(TestLauncherTest, OrphanedPreTest) {
  AddMockedTests("Test", {"firstTest", "PRE_firstTestOrphaned"});
  SetUpExpectCalls();
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// When there are no tests, the delegate should not be called.
TEST_F(TestLauncherTest, EmptyTestSetPasses) {
  SetUpExpectCalls();
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _)).Times(0);
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test TestLauncher filters DISABLED tests by default.
TEST_F(TestLauncherTest, FilterDisabledTestByDefault) {
  AddMockedTests("DISABLED_TestDisabled", {"firstTest"});
  AddMockedTests("Test",
                 {"firstTest", "secondTest", "DISABLED_firstTestDisabled"});
  SetUpExpectCalls();
  std::vector<std::string> tests_names = {"Test.firstTest", "Test.secondTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.firstTest",
                                              TestResult::TEST_SUCCESS),
                                 OnTestResult(&test_launcher, "Test.secondTest",
                                              TestResult::TEST_SUCCESS)));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test that TestLauncher reorders PRE_ tests to run directly before their
// main test.
TEST_F(TestLauncherTest, ReorderPreTests) {
  AddMockedTests("Test", {"firstTest", "PRE_PRE_firstTest", "PRE_firstTest"});
  SetUpExpectCalls();
  std::vector<std::string> tests_names = {
      "Test.PRE_PRE_firstTest", "Test.PRE_firstTest", "Test.firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .Times(1);
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test TestLauncher "gtest_filter" switch.
TEST_F(TestLauncherTest, UsingCommandLineFilter) {
  AddMockedTests("Test",
                 {"firstTest", "secondTest", "DISABLED_firstTestDisabled"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("gtest_filter", "Test*.first*");
  std::vector<std::string> tests_names = {"Test.firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
                             TestResult::TEST_SUCCESS));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test that the gtest filter also pulls in the PRE_ tests of matched tests.
TEST_F(TestLauncherTest, FilterIncludePreTest) {
  AddMockedTests("Test", {"firstTest", "secondTest", "PRE_firstTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("gtest_filter", "Test.firstTest");
  std::vector<std::string> tests_names = {"Test.PRE_firstTest",
                                          "Test.firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .Times(1);
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test that the gtest filter works when both include and exclude filters
// are defined.
TEST_F(TestLauncherTest, FilterIncludeExclude) {
  AddMockedTests("Test", {"firstTest", "PRE_firstTest", "secondTest",
                          "PRE_secondTest", "thirdTest", "DISABLED_Disable1"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("gtest_filter",
                                  "Test.*Test:-Test.secondTest");
  std::vector<std::string> tests_names = {
      "Test.PRE_firstTest",
      "Test.firstTest",
      "Test.thirdTest",
  };
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .Times(1);
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test TestLauncher "gtest_repeat" switch.
TEST_F(TestLauncherTest, RepeatTest) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  // Without --gtest_break_on_failure, all requested iterations run.
  command_line->AppendSwitchASCII("gtest_repeat", "2");
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .Times(2)
      .WillRepeatedly(::testing::DoAll(OnTestResult(
          &test_launcher, "Test.firstTest", TestResult::TEST_SUCCESS)));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test TestLauncher --gtest_repeat and --gtest_break_on_failure.
TEST_F(TestLauncherTest, RunningMultipleIterationsUntilFailure) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  // With --gtest_break_on_failure, iterations stop at the first failure.
  command_line->AppendSwitchASCII("gtest_repeat", "4");
  command_line->AppendSwitch("gtest_break_on_failure");
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.firstTest",
                                              TestResult::TEST_SUCCESS)))
      .WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.firstTest",
                                              TestResult::TEST_SUCCESS)))
      .WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.firstTest",
                                              TestResult::TEST_FAILURE)));
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// Test that TestLauncher retries a failed test and stops retrying once it
// succeeds.
TEST_F(TestLauncherTest, SuccessOnRetryTests) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("test-launcher-retry-limit", "2");
  std::vector<std::string> tests_names = {"Test.firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
                             TestResult::TEST_FAILURE))
      .WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
                             TestResult::TEST_SUCCESS));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test that TestLauncher retries a continuously failing test up to the retry
// limit before eventually failing and returning false.
TEST_F(TestLauncherTest, FailOnRetryTests) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("test-launcher-retry-limit", "2");
  std::vector<std::string> tests_names = {"Test.firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .Times(3)
      .WillRepeatedly(OnTestResult(&test_launcher, "Test.firstTest",
                                   TestResult::TEST_FAILURE));
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// Test that TestLauncher retries the whole PRE_ chain when any test in it
// fails.
TEST_F(TestLauncherTest, RetryPreTests) {
  AddMockedTests("Test", {"firstTest", "PRE_PRE_firstTest", "PRE_firstTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("test-launcher-retry-limit", "2");
  std::vector<TestResult> results = {
      GenerateTestResult("Test.PRE_PRE_firstTest", TestResult::TEST_SUCCESS),
      GenerateTestResult("Test.PRE_firstTest", TestResult::TEST_FAILURE),
      GenerateTestResult("Test.firstTest", TestResult::TEST_SUCCESS)};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillOnce(::testing::DoAll(
          OnTestResult(&test_launcher, "Test.PRE_PRE_firstTest",
                       TestResult::TEST_SUCCESS),
          OnTestResult(&test_launcher, "Test.PRE_firstTest",
                       TestResult::TEST_FAILURE),
          OnTestResult(&test_launcher, "Test.firstTest",
                       TestResult::TEST_SUCCESS)));
  std::vector<std::string> tests_names = {"Test.PRE_PRE_firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.PRE_PRE_firstTest",
                             TestResult::TEST_SUCCESS));
  tests_names = {"Test.PRE_firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.PRE_firstTest",
                             TestResult::TEST_SUCCESS));
  tests_names = {"Test.firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
                             TestResult::TEST_SUCCESS));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test that TestLauncher fails if a PRE_ test fails even when its main test
// passes.
TEST_F(TestLauncherTest, PreTestFailure) {
  AddMockedTests("Test", {"FirstTest", "PRE_FirstTest"});
  SetUpExpectCalls();
  std::vector<TestResult> results = {
      GenerateTestResult("Test.PRE_FirstTest", TestResult::TEST_FAILURE),
      GenerateTestResult("Test.FirstTest", TestResult::TEST_SUCCESS)};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillOnce(
          ::testing::DoAll(OnTestResult(&test_launcher, "Test.PRE_FirstTest",
                                        TestResult::TEST_FAILURE),
                           OnTestResult(&test_launcher, "Test.FirstTest",
                                        TestResult::TEST_SUCCESS)));
  EXPECT_CALL(test_launcher,
              LaunchChildGTestProcess(
                  _, testing::ElementsAre("Test.PRE_FirstTest"), _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.PRE_FirstTest",
                             TestResult::TEST_FAILURE));
  EXPECT_CALL(
      test_launcher,
      LaunchChildGTestProcess(_, testing::ElementsAre("Test.FirstTest"), _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.FirstTest",
                             TestResult::TEST_SUCCESS));
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// Test TestLauncher's switch to also run disabled unit tests.
TEST_F(TestLauncherTest, RunDisabledTests) {
  AddMockedTests("DISABLED_TestDisabled", {"firstTest"});
  AddMockedTests("Test",
                 {"firstTest", "secondTest", "DISABLED_firstTestDisabled"});
  SetUpExpectCalls();
  command_line->AppendSwitch("gtest_also_run_disabled_tests");
  command_line->AppendSwitchASCII("gtest_filter", "Test*.first*");
  std::vector<std::string> tests_names = {"DISABLED_TestDisabled.firstTest",
                                          "Test.firstTest",
                                          "Test.DISABLED_firstTestDisabled"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(::testing::DoAll(
          OnTestResult(&test_launcher, "Test.firstTest",
                       TestResult::TEST_SUCCESS),
          OnTestResult(&test_launcher, "DISABLED_TestDisabled.firstTest",
                       TestResult::TEST_SUCCESS),
          OnTestResult(&test_launcher, "Test.DISABLED_firstTestDisabled",
                       TestResult::TEST_SUCCESS)));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test that TestLauncher does not run tests excluded by a negative filter
// file under testing/buildbot/filters.
TEST_F(TestLauncherTest, DoesRunFilteredTests) {
  AddMockedTests("Test", {"firstTest", "secondTest"});
  SetUpExpectCalls();
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  // The filter file content is "-Test.firstTest".
  FilePath path = CreateFilterFile();
  command_line->AppendSwitchPath("test-launcher-filter-file", path);
  std::vector<std::string> tests_names = {"Test.secondTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.secondTest",
                                              TestResult::TEST_SUCCESS)));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test that TestLauncher runs disabled tests and tests excluded by a filter
// file under testing/buildbot/filters when gtest_also_run_disabled_tests is
// set.
TEST_F(TestLauncherTest, RunDisabledTestsWithFilteredTests) {
  AddMockedTests("DISABLED_TestDisabled", {"firstTest"});
  AddMockedTests("Test", {"firstTest", "DISABLED_firstTestDisabled"});
  SetUpExpectCalls();
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  // The filter file content is "-Test.firstTest", but Test.firstTest will
  // still run because gtest_also_run_disabled_tests is set.
  FilePath path = CreateFilterFile();
  command_line->AppendSwitchPath("test-launcher-filter-file", path);
  command_line->AppendSwitch("gtest_also_run_disabled_tests");
  std::vector<std::string> tests_names = {"DISABLED_TestDisabled.firstTest",
                                          "Test.firstTest",
                                          "Test.DISABLED_firstTestDisabled"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(::testing::DoAll(
          OnTestResult(&test_launcher, "Test.firstTest",
                       TestResult::TEST_SUCCESS),
          OnTestResult(&test_launcher, "DISABLED_TestDisabled.firstTest",
                       TestResult::TEST_SUCCESS),
          OnTestResult(&test_launcher, "Test.DISABLED_firstTestDisabled",
                       TestResult::TEST_SUCCESS)));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// A disabled test should also disable all of its PRE_ tests.
TEST_F(TestLauncherTest, DisablePreTests) {
  AddMockedTests("Test", {"DISABLED_firstTest", "PRE_PRE_firstTest",
                          "PRE_firstTest", "secondTest"});
  SetUpExpectCalls();
  std::vector<std::string> tests_names = {"Test.secondTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .Times(1);
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test that TestLauncher enforces running only the tests listed in the exact
// positive filter.
TEST_F(TestLauncherTest, EnforceRunTestsInExactPositiveFilter) {
  AddMockedTests("Test", {"firstTest", "secondTest", "thirdTest"});
  SetUpExpectCalls();

  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath path = dir.GetPath().AppendASCII("test.filter");
  WriteFile(path, "Test.firstTest\nTest.thirdTest");
  command_line->AppendSwitchPath("test-launcher-filter-file", path);
  command_line->AppendSwitch("enforce-exact-positive-filter");
  command_line->AppendSwitchASCII("test-launcher-total-shards", "2");
  command_line->AppendSwitchASCII("test-launcher-shard-index", "0");

  // Test.firstTest is in the exact positive filter, so it is expected to run.
  // Test.thirdTest is launched in another shard.
  std::vector<std::string> tests_names = {"Test.firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.firstTest",
                                              TestResult::TEST_SUCCESS)));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test that TestLauncher fails if enforce-exact-positive-filter and
// gtest_filter are both present.
TEST_F(TestLauncherTest,
       EnforceRunTestsInExactPositiveFailWithGtestFilterFlag) {
  command_line->AppendSwitch("enforce-exact-positive-filter");
  command_line->AppendSwitchASCII("gtest_filter", "Test.firstTest;-Test.*");
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// Test TestLauncher should fail if enforce-exact-positive-filter is set
// with negative test filters.
TEST_F(TestLauncherTest, EnforceRunTestsInExactPositiveFailWithNegativeFilter) {
  command_line->AppendSwitch("enforce-exact-positive-filter");
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath path = CreateFilterFile();
  command_line->AppendSwitchPath("test-launcher-filter-file", path);
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// Test TestLauncher should fail if enforce-exact-positive-filter is set
// with wildcard positive filters.
TEST_F(TestLauncherTest,
       EnforceRunTestsInExactPositiveFailWithWildcardPositiveFilter) {
  command_line->AppendSwitch("enforce-exact-positive-filter");
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath path = dir.GetPath().AppendASCII("test.filter");
  WriteFile(path, "Test.*");
  command_line->AppendSwitchPath("test-launcher-filter-file", path);
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// Tests fail if they produce too much output.
TEST_F(TestLauncherTest, ExcessiveOutput) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("test-launcher-retry-limit", "0");
  command_line->AppendSwitchASCII("test-launcher-print-test-stdio", "never");
  TestResult test_result =
      GenerateTestResult("Test.firstTest", TestResult::TEST_SUCCESS,
                         Milliseconds(30), std::string(500000, 'a'));
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillOnce(OnTestResult(&test_launcher, test_result));
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// Use a command-line switch to allow more output.
TEST_F(TestLauncherTest, OutputLimitSwitch) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("test-launcher-print-test-stdio", "never");
  command_line->AppendSwitchASCII("test-launcher-output-bytes-limit", "800000");
  TestResult test_result =
      GenerateTestResult("Test.firstTest", TestResult::TEST_SUCCESS,
                         Milliseconds(30), std::string(500000, 'a'));
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillOnce(OnTestResult(&test_launcher, test_result));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// The shard index must be less than the total number of shards.
TEST_F(TestLauncherTest, FaultyShardSetup) {
  command_line->AppendSwitchASCII("test-launcher-total-shards", "2");
  command_line->AppendSwitchASCII("test-launcher-shard-index", "2");
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// Test the --test-launcher-print-test-stdio switch.
TEST_F(TestLauncherTest, RedirectStdio) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("test-launcher-print-test-stdio", "always");
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
                             TestResult::TEST_SUCCESS));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Sharding should be stable, always selecting the same tests.
TEST_F(TestLauncherTest, StableSharding) {
  AddMockedTests("Test", {"firstTest", "secondTest", "thirdTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("test-launcher-total-shards", "2");
  command_line->AppendSwitchASCII("test-launcher-shard-index", "0");
  command_line->AppendSwitch("test-launcher-stable-sharding");
  std::vector<std::string> tests_names = {"Test.firstTest", "Test.secondTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.firstTest",
                                              TestResult::TEST_SUCCESS),
                                 OnTestResult(&test_launcher, "Test.secondTest",
                                              TestResult::TEST_SUCCESS)));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Validate that |iteration_data| contains exactly one test result matching
// |test_result|.
bool ValidateTestResultObject(const Value::Dict& iteration_data,
                              TestResult& test_result) {
  const Value::List* results = iteration_data.FindList(test_result.full_name);
  if (!results) {
    ADD_FAILURE() << "Results not found";
    return false;
  }
  if (1u != results->size()) {
    ADD_FAILURE() << "Expected one result, actual: " << results->size();
    return false;
  }
  const Value::Dict* dict = (*results)[0].GetIfDict();
  if (!dict) {
    ADD_FAILURE() << "Unexpected type";
    return false;
  }

  using test_launcher_utils::ValidateKeyValue;
  bool result = ValidateKeyValue(*dict, "elapsed_time_ms",
                                 test_result.elapsed_time.InMilliseconds());

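  // Note: "losless_snippet" (sic) matches the misspelled key the launcher
  // actually writes into the summary JSON.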
  if (!dict->FindBool("losless_snippet").value_or(false)) {
    ADD_FAILURE() << "losless_snippet expected to be true";
    result = false;
  }

  result &=
      ValidateKeyValue(*dict, "output_snippet", test_result.output_snippet);

  std::string base64_output_snippet =
      base::Base64Encode(test_result.output_snippet);
  result &=
      ValidateKeyValue(*dict, "output_snippet_base64", base64_output_snippet);

  result &= ValidateKeyValue(*dict, "status", test_result.StatusAsString());

  const Value::List* list = dict->FindList("result_parts");
  if (!list || test_result.test_result_parts.size() != list->size()) {
    ADD_FAILURE() << "test_result_parts count is not valid";
    return false;
  }

  for (unsigned i = 0; i < test_result.test_result_parts.size(); i++) {
    TestResultPart result_part = test_result.test_result_parts.at(i);
    const Value::Dict& part_dict = (*list)[i].GetDict();

    result &= ValidateKeyValue(part_dict, "type", result_part.TypeAsString());
    result &= ValidateKeyValue(part_dict, "file", result_part.file_name);
    result &= ValidateKeyValue(part_dict, "line", result_part.line_number);
    result &= ValidateKeyValue(part_dict, "summary", result_part.summary);
    result &= ValidateKeyValue(part_dict, "message", result_part.message);
  }
  return result;
}

// Validate that the |root| dictionary contains a list of |values|
// at |key|.
bool ValidateStringList(const std::optional<Value::Dict>& root,
                        const std::string& key,
                        std::vector<const char*> values) {
  const Value::List* list = root->FindList(key);
  if (!list) {
    ADD_FAILURE() << "|root| has no list_value in key: " << key;
    return false;
  }

  if (values.size() != list->size()) {
    ADD_FAILURE() << "expected size: " << values.size()
                  << ", actual size:" << list->size();
    return false;
  }

  for (unsigned i = 0; i < values.size(); i++) {
    if (!(*list)[i].is_string() ||
        (*list)[i].GetString() != values.at(i)) {
      ADD_FAILURE() << "Expected list values do not match actual list";
      return false;
    }
  }
  return true;
}

// Unit tests to validate TestLauncher outputs the correct JSON file.
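// An abridged summary looks roughly like this (a sketch; key order and
// unrelated fields omitted):
//   {
//     "all_tests": ["Test.firstTest", ...],
//     "disabled_tests": [...],
//     "test_locations": {"Test.firstTest": {"file": "File", "line": 100}},
//     "per_iteration_data": [
//       {"Test.firstTest": [{"status": "SUCCESS", ...}]}
//     ]
//   }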
TEST_F(TestLauncherTest, JsonSummary) {
  AddMockedTests("DISABLED_TestDisabled", {"firstTest"});
  AddMockedTests("Test",
                 {"firstTest", "secondTest", "DISABLED_firstTestDisabled"});
  SetUpExpectCalls();

  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath path = dir.GetPath().AppendASCII("SaveSummaryResult.json");
  command_line->AppendSwitchPath("test-launcher-summary-output", path);
  command_line->AppendSwitchASCII("gtest_repeat", "2");
  // Force the repeats to run sequentially.
  command_line->AppendSwitch("gtest_break_on_failure");

  // Set up results to be returned by the test launcher delegate.
  TestResult first_result =
      GenerateTestResult("Test.firstTest", TestResult::TEST_SUCCESS,
                         Milliseconds(30), "output_first");
  first_result.test_result_parts.push_back(GenerateTestResultPart(
      TestResultPart::kSuccess, "TestFile", 110, "summary", "message"));
  TestResult second_result =
      GenerateTestResult("Test.secondTest", TestResult::TEST_SUCCESS,
                         Milliseconds(50), "output_second");

  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .Times(2)
      .WillRepeatedly(
          ::testing::DoAll(OnTestResult(&test_launcher, first_result),
                           OnTestResult(&test_launcher, second_result)));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));

  // Validate the resulting JSON file is the expected output.
  std::optional<Value::Dict> root = test_launcher_utils::ReadSummary(path);
  ASSERT_TRUE(root);
  EXPECT_TRUE(
      ValidateStringList(root, "all_tests",
                         {"Test.firstTest", "Test.firstTestDisabled",
                          "Test.secondTest", "TestDisabled.firstTest"}));
  EXPECT_TRUE(
      ValidateStringList(root, "disabled_tests",
                         {"Test.firstTestDisabled", "TestDisabled.firstTest"}));

  const Value::Dict* dict = root->FindDict("test_locations");
  ASSERT_TRUE(dict);
  EXPECT_EQ(2u, dict->size());
  ASSERT_TRUE(test_launcher_utils::ValidateTestLocation(*dict, "Test.firstTest",
                                                        "File", 100));
  ASSERT_TRUE(test_launcher_utils::ValidateTestLocation(
      *dict, "Test.secondTest", "File", 100));

  const Value::List* list = root->FindList("per_iteration_data");
  ASSERT_TRUE(list);
  ASSERT_EQ(2u, list->size());
  for (const auto& iteration_val : *list) {
    ASSERT_TRUE(iteration_val.is_dict());
    const base::Value::Dict& iteration_dict = iteration_val.GetDict();
    EXPECT_EQ(2u, iteration_dict.size());
    EXPECT_TRUE(ValidateTestResultObject(iteration_dict, first_result));
    EXPECT_TRUE(ValidateTestResultObject(iteration_dict, second_result));
  }
}

// Validate TestLauncher outputs the correct JSON file
// when running disabled tests.
TEST_F(TestLauncherTest, JsonSummaryWithDisabledTests) {
  AddMockedTests("Test", {"DISABLED_Test"});
  SetUpExpectCalls();

  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath path = dir.GetPath().AppendASCII("SaveSummaryResult.json");
  command_line->AppendSwitchPath("test-launcher-summary-output", path);
  command_line->AppendSwitch("gtest_also_run_disabled_tests");

  // Set up results to be returned by the test launcher delegate.
  TestResult test_result =
      GenerateTestResult("Test.DISABLED_Test", TestResult::TEST_SUCCESS,
                         Milliseconds(50), "output_second");

  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillOnce(OnTestResult(&test_launcher, test_result));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));

  // Validate the resulting JSON file is the expected output.
  std::optional<Value::Dict> root = test_launcher_utils::ReadSummary(path);
  ASSERT_TRUE(root);
  Value::Dict* dict = root->FindDict("test_locations");
  ASSERT_TRUE(dict);
  EXPECT_EQ(1u, dict->size());
  EXPECT_TRUE(test_launcher_utils::ValidateTestLocation(
      *dict, "Test.DISABLED_Test", "File", 100));

  Value::List* list = root->FindList("per_iteration_data");
  ASSERT_TRUE(list);
  ASSERT_EQ(1u, list->size());

  Value::Dict* iteration_dict = (*list)[0].GetIfDict();
  ASSERT_TRUE(iteration_dict);
  EXPECT_EQ(1u, iteration_dict->size());
  // We expect the result to be stripped of the DISABLED_ prefix.
  test_result.full_name = "Test.Test";
  EXPECT_TRUE(ValidateTestResultObject(*iteration_dict, test_result));
}

// Matches a std::tuple<const FilePath&, const FilePath&> where the first
// item is a parent of the second.
MATCHER(DirectoryIsParentOf, "") {
  return std::get<0>(arg).IsParent(std::get<1>(arg));
}

// Test that the launcher creates a dedicated temp dir for a child proc and
// cleans it up.
TEST_F(TestLauncherTest, TestChildTempDir) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  ON_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillByDefault(OnTestResult(&test_launcher, "Test.firstTest",
                                  TestResult::TEST_SUCCESS));

  FilePath task_temp;
  if (TestLauncher::SupportsPerChildTempDirs()) {
    // Platforms that support child proc temp dirs must get a |child_temp_dir|
    // arg that exists and is within |task_temp_dir|.
    EXPECT_CALL(
        test_launcher,
        LaunchChildGTestProcess(
            _, _, _, ::testing::ResultOf(DirectoryExists, ::testing::IsTrue())))
        .With(::testing::Args<2, 3>(DirectoryIsParentOf()))
        .WillOnce(::testing::SaveArg<2>(&task_temp));
  } else {
    // Platforms that don't support child proc temp dirs must get an empty
    // |child_temp_dir| arg.
    EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, FilePath()))
        .WillOnce(::testing::SaveArg<2>(&task_temp));
  }

  EXPECT_TRUE(test_launcher.Run(command_line.get()));

  // The task's temporary directory should have been deleted.
  EXPECT_FALSE(DirectoryExists(task_temp));
}

#if BUILDFLAG(IS_FUCHSIA)
// Verifies that test processes have /data, /cache and /tmp available.
TEST_F(TestLauncherTest, ProvidesDataCacheAndTmpDirs) {
  EXPECT_TRUE(base::DirectoryExists(base::FilePath("/data")));
  EXPECT_TRUE(base::DirectoryExists(base::FilePath("/cache")));
  EXPECT_TRUE(base::DirectoryExists(base::FilePath("/tmp")));
}
#endif  // BUILDFLAG(IS_FUCHSIA)

// Unit tests to validate the UnitTestLauncherDelegate implementation.
class UnitTestLauncherDelegateTester : public testing::Test {
 protected:
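  // Real platform delegate used to construct UnitTestLauncherDelegate
  // instances in the tests below.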
  DefaultUnitTestPlatformDelegate defaultPlatform;
  ScopedTempDir dir;

 private:
  base::test::TaskEnvironment task_environment;
};

// Validate that the delegate produces the correct command line.
TEST_F(UnitTestLauncherDelegateTester, GetCommandLine) {
  UnitTestLauncherDelegate launcher_delegate(&defaultPlatform, 10u, true,
                                             DoNothing());
  TestLauncherDelegate* delegate_ptr = &launcher_delegate;

  std::vector<std::string> test_names(5, "Tests");
  base::FilePath temp_dir;
  base::FilePath result_file;
  CreateNewTempDirectory(FilePath::StringType(), &temp_dir);

  // Make sure that `--gtest_filter` from the original command line is dropped
  // from a command line passed to the child process, since `--gtest_filter` is
  // also specified in the flagfile.
  CommandLine::ForCurrentProcess()->AppendSwitchASCII("gtest_filter", "*");
  // But other random flags should be preserved.
  CommandLine::ForCurrentProcess()->AppendSwitch("mochi-are-delicious");
  CommandLine cmd_line =
      delegate_ptr->GetCommandLine(test_names, temp_dir, &result_file);
  EXPECT_TRUE(cmd_line.HasSwitch("single-process-tests"));
  EXPECT_FALSE(cmd_line.HasSwitch("gtest_filter"));
  EXPECT_TRUE(cmd_line.HasSwitch("mochi-are-delicious"));
  EXPECT_EQ(cmd_line.GetSwitchValuePath("test-launcher-output"), result_file);

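  // The test names themselves are passed via the file referenced by
  // --gtest_flagfile rather than directly on the command line; read it back
  // and verify its contents.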
  const int size = 2048;
  std::string content;
  ASSERT_TRUE(ReadFileToStringWithMaxSize(
      cmd_line.GetSwitchValuePath("gtest_flagfile"), &content, size));
  EXPECT_EQ(content.find("--gtest_filter="), 0u);
  base::ReplaceSubstringsAfterOffset(&content, 0, "--gtest_filter=", "");
  std::vector<std::string> gtest_filter_tests =
      SplitString(content, ":", TRIM_WHITESPACE, SPLIT_WANT_ALL);
  ASSERT_EQ(gtest_filter_tests.size(), test_names.size());
  for (unsigned i = 0; i < test_names.size(); i++) {
    EXPECT_EQ(gtest_filter_tests.at(i), test_names.at(i));
  }
}

// Verify that a result watcher can stop polling early when all tests complete.
TEST_F(ResultWatcherTest, PollCompletesQuickly) {
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath result_file = CreateResultFile();
  ASSERT_TRUE(AppendToFile(
      result_file,
      StrCat({" <x-teststart name=\"B\" classname=\"A\" timestamp=\"",
              TimeFormatAsIso8601(Time::Now()).c_str(), "\" />\n",
              " <testcase name=\"B\" status=\"run\" time=\"0.500\" "
              "classname=\"A\" timestamp=\"",
              TimeFormatAsIso8601(Time::Now()).c_str(), "\">\n",
              " </testcase>\n",
              " <x-teststart name=\"C\" classname=\"A\" timestamp=\"",
              TimeFormatAsIso8601(Time::Now() + Milliseconds(500)).c_str(),
              "\" />\n",
              " <testcase name=\"C\" status=\"run\" time=\"0.500\" "
              "classname=\"A\" timestamp=\"",
              TimeFormatAsIso8601(Time::Now() + Milliseconds(500)).c_str(),
              "\">\n", " </testcase>\n", " </testsuite>\n",
              "</testsuites>\n"})));

  MockResultWatcher result_watcher(result_file, 2);
  EXPECT_CALL(result_watcher, WaitWithTimeout(_))
      .WillOnce(DoAll(InvokeWithoutArgs([&]() {
                        task_environment.AdvanceClock(Milliseconds(1500));
                      }),
                      Return(true)));

  Time start = Time::Now();
  ASSERT_TRUE(result_watcher.PollUntilDone(Seconds(45)));
  ASSERT_EQ(Time::Now() - start, Milliseconds(1500));
}

// Verify that a result watcher repeatedly checks the file for a batch of slow
// tests. Each test completes in 40s, which is just under the timeout of 45s.
TEST_F(ResultWatcherTest, PollCompletesSlowly) {
  SCOPED_TRACE(::testing::Message() << "Start ticks: " << TimeTicks::Now());

  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath result_file = CreateResultFile();
  const Time start = Time::Now();
  ASSERT_TRUE(AppendToFile(
      result_file,
      StrCat({" <x-teststart name=\"B\" classname=\"A\" timestamp=\"",
              TimeFormatAsIso8601(start).c_str(), "\" />\n"})));

  MockResultWatcher result_watcher(result_file, 10);
  size_t checks = 0;
  bool done = false;
  EXPECT_CALL(result_watcher, WaitWithTimeout(_))
      .Times(10)
      .WillRepeatedly(
          DoAll(Invoke([&](TimeDelta timeout) {
                  task_environment.AdvanceClock(timeout);
                  // Append a result with "time" (duration) as 40.000s and
                  // "timestamp" (test start) as `Now()` - 45s.
                  AppendToFile(
                      result_file,
                      StrCat({" <testcase name=\"B\" status=\"run\" "
                              "time=\"40.000\" classname=\"A\" timestamp=\"",
                              TimeFormatAsIso8601(Time::Now() - Seconds(45))
                                  .c_str(),
                              "\">\n", " </testcase>\n"}));
                  checks++;
                  if (checks == 10) {
                    AppendToFile(result_file,
                                 " </testsuite>\n"
                                 "</testsuites>\n");
                    done = true;
                  } else {
                    // Append a preliminary result for the next test that
                    // started when the last test completed (i.e., `Now()` - 45s
                    // + 40s).
                    AppendToFile(
                        result_file,
                        StrCat({" <x-teststart name=\"B\" classname=\"A\" "
                                "timestamp=\"",
                                TimeFormatAsIso8601(Time::Now() - Seconds(5))
                                    .c_str(),
                                "\" />\n"}));
                  }
                }),
                ReturnPointee(&done)));

  ASSERT_TRUE(result_watcher.PollUntilDone(Seconds(45)));
  // The first check occurs 45s after the batch starts, so the sequence of
  // events looks like:
  //   00:00 - Test 1 starts
  //   00:40 - Test 1 completes, test 2 starts
  //   00:45 - Check 1 occurs
  //   01:20 - Test 2 completes, test 3 starts
  //   01:25 - Check 2 occurs
  //   02:00 - Test 3 completes, test 4 starts
  //   02:05 - Check 3 occurs
  //   ...
  ASSERT_EQ(Time::Now() - start, Seconds(45 + 40 * 9));
}

// Verify that the result watcher identifies when a test times out.
TEST_F(ResultWatcherTest, PollTimeout) {
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath result_file = CreateResultFile();
  ASSERT_TRUE(AppendToFile(
      result_file,
      StrCat({" <x-teststart name=\"B\" classname=\"A\" timestamp=\"",
              TimeFormatAsIso8601(Time::Now()).c_str(), "\" />\n"})));

  MockResultWatcher result_watcher(result_file, 10);
  EXPECT_CALL(result_watcher, WaitWithTimeout(_))
      .Times(2)
      .WillRepeatedly(
          DoAll(Invoke(&task_environment, &test::TaskEnvironment::AdvanceClock),
                Return(false)));

  Time start = Time::Now();
  ASSERT_FALSE(result_watcher.PollUntilDone(Seconds(45)));
  // Include a small grace period.
  ASSERT_EQ(Time::Now() - start, Seconds(45) + TestTimeouts::tiny_timeout());
}

// Verify that the result watcher retries incomplete reads.
TEST_F(ResultWatcherTest, RetryIncompleteResultRead) {
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath result_file = CreateResultFile();
  // The opening "<summary>" tag is never closed, simulating a partially
  // written result.
  ASSERT_TRUE(AppendToFile(
      result_file,
      StrCat({" <x-teststart name=\"B\" classname=\"A\" timestamp=\"",
              TimeFormatAsIso8601(Time::Now()).c_str(), "\" />\n",
              " <testcase name=\"B\" status=\"run\" time=\"40.000\" "
              "classname=\"A\" timestamp=\"",
              TimeFormatAsIso8601(Time::Now()).c_str(), "\">\n",
              " <summary>"})));

  MockResultWatcher result_watcher(result_file, 2);
  size_t attempts = 0;
  bool done = false;
  EXPECT_CALL(result_watcher, WaitWithTimeout(_))
      .Times(5)
      .WillRepeatedly(DoAll(Invoke([&](TimeDelta timeout) {
                              task_environment.AdvanceClock(timeout);
                              // Don't bother writing the rest of the file when
                              // this test completes.
                              done = ++attempts >= 5;
                            }),
                            ReturnPointee(&done)));

  Time start = Time::Now();
  ASSERT_TRUE(result_watcher.PollUntilDone(Seconds(45)));
  ASSERT_EQ(Time::Now() - start,
            Seconds(45) + 4 * TestTimeouts::tiny_timeout());
}

// Verify that the result watcher continues polling with the base timeout when
// the clock jumps backward.
TEST_F(ResultWatcherTest, PollWithClockJumpBackward) {
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath result_file = CreateResultFile();
  // Cannot move the mock time source backward, so write future timestamps into
  // the result file instead.
  Time time_before_change = Time::Now() + Hours(1);
  ASSERT_TRUE(AppendToFile(
      result_file,
      StrCat(
          {" <x-teststart name=\"B\" classname=\"A\" timestamp=\"",
           TimeFormatAsIso8601(time_before_change).c_str(), "\" />\n",
           " <testcase name=\"B\" status=\"run\" time=\"0.500\" "
           "classname=\"A\" timestamp=\"",
           TimeFormatAsIso8601(time_before_change).c_str(), "\">\n",
           " </testcase>\n",
           " <x-teststart name=\"C\" classname=\"A\" timestamp=\"",
           TimeFormatAsIso8601(time_before_change + Milliseconds(500)).c_str(),
           "\" />\n"})));

  MockResultWatcher result_watcher(result_file, 2);
  EXPECT_CALL(result_watcher, WaitWithTimeout(_))
      .WillOnce(
          DoAll(Invoke(&task_environment, &test::TaskEnvironment::AdvanceClock),
                Return(false)))
      .WillOnce(
          DoAll(Invoke(&task_environment, &test::TaskEnvironment::AdvanceClock),
                Return(true)));

  Time start = Time::Now();
  ASSERT_TRUE(result_watcher.PollUntilDone(Seconds(45)));
  ASSERT_EQ(Time::Now() - start, Seconds(90));
}

// Verify that the result watcher continues polling with the base timeout when
// the clock jumps forward.
TEST_F(ResultWatcherTest, PollWithClockJumpForward) {
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath result_file = CreateResultFile();
  ASSERT_TRUE(AppendToFile(
      result_file,
      StrCat({" <x-teststart name=\"B\" classname=\"A\" timestamp=\"",
              TimeFormatAsIso8601(Time::Now()).c_str(), "\" />\n",
              " <testcase name=\"B\" status=\"run\" time=\"0.500\" "
              "classname=\"A\" timestamp=\"",
              TimeFormatAsIso8601(Time::Now()).c_str(), "\">\n",
              " </testcase>\n",
              " <x-teststart name=\"C\" classname=\"A\" timestamp=\"",
              TimeFormatAsIso8601(Time::Now() + Milliseconds(500)).c_str(),
              "\" />\n"})));
  task_environment.AdvanceClock(Hours(1));

  MockResultWatcher result_watcher(result_file, 2);
  EXPECT_CALL(result_watcher, WaitWithTimeout(_))
      .WillOnce(
          DoAll(Invoke(&task_environment, &test::TaskEnvironment::AdvanceClock),
                Return(false)))
      .WillOnce(
          DoAll(Invoke(&task_environment, &test::TaskEnvironment::AdvanceClock),
                Return(true)));

  Time start = Time::Now();
  ASSERT_TRUE(result_watcher.PollUntilDone(Seconds(45)));
  ASSERT_EQ(Time::Now() - start, Seconds(90));
}

// Validate that the delegate sets the batch size correctly.
TEST_F(UnitTestLauncherDelegateTester, BatchSize) {
  UnitTestLauncherDelegate launcher_delegate(&defaultPlatform, 15u, true,
                                             DoNothing());
  TestLauncherDelegate* delegate_ptr = &launcher_delegate;
  EXPECT_EQ(delegate_ptr->GetBatchSize(), 15u);
}

// The following four tests are disabled because they are meant to run only
// from |RunMockTests| to validate the test launcher's output for known
// results. The tests are expected to run in order within the same batch.

// Basic test that passes.
TEST(MockUnitTests, DISABLED_PassTest) {
  ASSERT_TRUE(true);
}
// Basic test that fails.
TEST(MockUnitTests, DISABLED_FailTest) {
  ASSERT_TRUE(false);
}
// Basic test that crashes.
TEST(MockUnitTests, DISABLED_CrashTest) {
  ImmediateCrash();
}
// Basic test that is never reached, due to the preceding crash in the same
// batch.
TEST(MockUnitTests, DISABLED_NoRunTest) {
  ASSERT_TRUE(true);
}

// Use TestLauncher to launch the mock unit tests above
// and validate the resulting JSON file.
TEST_F(UnitTestLauncherDelegateTester, RunMockTests) {
  CommandLine command_line(CommandLine::ForCurrentProcess()->GetProgram());
  command_line.AppendSwitchASCII("gtest_filter", "MockUnitTests.DISABLED_*");

  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath path = dir.GetPath().AppendASCII("SaveSummaryResult.json");
  command_line.AppendSwitchPath("test-launcher-summary-output", path);
  command_line.AppendSwitch("gtest_also_run_disabled_tests");
  command_line.AppendSwitchASCII("test-launcher-retry-limit", "0");

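  // This respawns the current binary as a child process running the four
  // DISABLED_ mock tests in one batch; the crash in DISABLED_CrashTest keeps
  // DISABLED_NoRunTest from running.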
  std::string output;
  GetAppOutputAndError(command_line, &output);

  // Validate the resulting JSON file is the expected output.
  std::optional<Value::Dict> root = test_launcher_utils::ReadSummary(path);
  ASSERT_TRUE(root);

  const Value::Dict* dict = root->FindDict("test_locations");
  ASSERT_TRUE(dict);
  EXPECT_EQ(4u, dict->size());

  EXPECT_TRUE(
      test_launcher_utils::ValidateTestLocations(*dict, "MockUnitTests"));

  const Value::List* list = root->FindList("per_iteration_data");
  ASSERT_TRUE(list);
  ASSERT_EQ(1u, list->size());

  const Value::Dict* iteration_dict = (*list)[0].GetIfDict();
  ASSERT_TRUE(iteration_dict);
  EXPECT_EQ(4u, iteration_dict->size());
  // We expect the results to be stripped of the DISABLED_ prefix.
  EXPECT_TRUE(test_launcher_utils::ValidateTestResult(
      *iteration_dict, "MockUnitTests.PassTest", "SUCCESS", 0u));
  EXPECT_TRUE(test_launcher_utils::ValidateTestResult(
      *iteration_dict, "MockUnitTests.FailTest", "FAILURE", 1u));
  EXPECT_TRUE(test_launcher_utils::ValidateTestResult(
      *iteration_dict, "MockUnitTests.CrashTest", "CRASH", 0u));
  EXPECT_TRUE(test_launcher_utils::ValidateTestResult(
      *iteration_dict, "MockUnitTests.NoRunTest", "NOTRUN", 0u,
      /*have_running_info=*/false));
}

TEST(ProcessGTestOutputTest, RunMockTests) {
  ScopedTempDir dir;
  CommandLine command_line(CommandLine::ForCurrentProcess()->GetProgram());
  command_line.AppendSwitchASCII("gtest_filter", "MockUnitTests.DISABLED_*");

  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath path = dir.GetPath().AppendASCII("SaveSummaryResult.xml");
  command_line.AppendSwitchPath("test-launcher-output", path);
  command_line.AppendSwitch("gtest_also_run_disabled_tests");
  command_line.AppendSwitch("single-process-tests");

  std::string output;
  GetAppOutputAndError(command_line, &output);

  std::vector<TestResult> test_results;
  bool crashed = false;
  bool have_test_results = ProcessGTestOutput(path, &test_results, &crashed);

  EXPECT_TRUE(have_test_results);
  EXPECT_TRUE(crashed);
  ASSERT_EQ(test_results.size(), 3u);

  EXPECT_EQ(test_results[0].full_name, "MockUnitTests.DISABLED_PassTest");
  EXPECT_EQ(test_results[0].status, TestResult::TEST_SUCCESS);
  EXPECT_EQ(test_results[0].test_result_parts.size(), 0u);
  ASSERT_TRUE(test_results[0].timestamp.has_value());
  EXPECT_GT(*test_results[0].timestamp, Time());
  EXPECT_FALSE(test_results[0].thread_id);
  EXPECT_FALSE(test_results[0].process_num);

  EXPECT_EQ(test_results[1].full_name, "MockUnitTests.DISABLED_FailTest");
  EXPECT_EQ(test_results[1].status, TestResult::TEST_FAILURE);
  EXPECT_EQ(test_results[1].test_result_parts.size(), 1u);
  ASSERT_TRUE(test_results[1].timestamp.has_value());
  EXPECT_GT(*test_results[1].timestamp, Time());

  EXPECT_EQ(test_results[2].full_name, "MockUnitTests.DISABLED_CrashTest");
  EXPECT_EQ(test_results[2].status, TestResult::TEST_CRASH);
  EXPECT_EQ(test_results[2].test_result_parts.size(), 0u);
  ASSERT_TRUE(test_results[2].timestamp.has_value());
  EXPECT_GT(*test_results[2].timestamp, Time());
}

1295 // TODO(crbug.com/1498237): Enable the test once GetAppOutputAndError
1296 // can collect stdout and stderr on Fuchsia.
1297 #if !BUILDFLAG(IS_FUCHSIA)
TEST(ProcessGTestOutputTest,FoundTestCaseNotEnforced)1298 TEST(ProcessGTestOutputTest, FoundTestCaseNotEnforced) {
1299 ScopedTempDir dir;
1300 ASSERT_TRUE(dir.CreateUniqueTempDir());
1301 FilePath path = dir.GetPath().AppendASCII("test.filter");
1302 WriteFile(path, "Test.firstTest\nTest.secondTest");
1303 CommandLine command_line(CommandLine::ForCurrentProcess()->GetProgram());
1304 command_line.AppendSwitchPath("test-launcher-filter-file", path);
1305 command_line.AppendSwitch("enforce-exact-positive-filter");
1306 std::string output;
1307 // Test cases in the filter do not exist, hence test launcher should
1308 // fail and print their names.
1309 EXPECT_FALSE(GetAppOutputAndError(command_line, &output));
1310 // Banner should appear in the output.
1311 const char kBanner[] = "Found exact positive filter not enforced:";
1312 EXPECT_TRUE(Contains(output, kBanner));
1313 std::vector<std::string> lines = base::SplitString(
1314 output, "\n", base::KEEP_WHITESPACE, base::SPLIT_WANT_ALL);
1315 std::unordered_set<std::string> tests_not_enforced;
1316 bool banner_has_printed = false;
1317 for (size_t i = 0; i < lines.size(); i++) {
1318 if (Contains(lines[i], kBanner)) {
      // The next two lines should contain the unenforced test cases, and the
      // third line the check-failure message.
      EXPECT_LT(i + 3, lines.size());
      // The banner should appear only once.
      EXPECT_FALSE(banner_has_printed);
      banner_has_printed = true;
      continue;
    }
    if (banner_has_printed && tests_not_enforced.size() < 2) {
      // Note: the test launcher logs the unenforced test names with datetime
      // and file/line info prepended, e.g.:
      // [1030/220237.425678:ERROR:test_launcher.cc(2123)] Test.secondTest
      // [1030/220237.425682:ERROR:test_launcher.cc(2123)] Test.firstTest
      std::vector<std::string> line_vec = base::SplitString(
          lines[i], "]", base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
      ASSERT_EQ(line_vec.size(), 2u);
      tests_not_enforced.insert(line_vec[1]);
      continue;
    }
    if (banner_has_printed && tests_not_enforced.size() == 2) {
      // Official builds discard logs from CHECK failures, so the test cannot
      // catch the "Check failed" line there.
#if !defined(OFFICIAL_BUILD) || DCHECK_IS_ON()
      EXPECT_TRUE(Contains(lines[i],
                           "Check failed: "
                           "!found_exact_positive_filter_not_enforced."));
#endif  // !defined(OFFICIAL_BUILD) || DCHECK_IS_ON()
      break;
    }
  }
  // The printed test cases are not ordered, so UnorderedElementsAre is
  // needed for the comparison.
  EXPECT_THAT(tests_not_enforced, testing::UnorderedElementsAre(
                                      "Test.firstTest", "Test.secondTest"));
}
#endif  // !BUILDFLAG(IS_FUCHSIA)

// TODO(crbug.com/1094369): Enable leaked-child checks on other platforms.
#if BUILDFLAG(IS_FUCHSIA)

// Test that leaves a child process running. The test is DISABLED_, so it can
// be launched explicitly by RunMockLeakProcessTest.

MULTIPROCESS_TEST_MAIN(LeakChildProcess) {
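  // Sleep forever; the parent test intentionally never waits for or kills
  // this child, so it outlives the test that spawned it.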
  while (true)
    PlatformThread::Sleep(base::Seconds(1));
}

TEST(LeakedChildProcessTest, DISABLED_LeakChildProcess) {
  Process child_process = SpawnMultiProcessTestChild(
      "LeakChildProcess", GetMultiProcessTestChildBaseCommandLine(),
      LaunchOptions());
  ASSERT_TRUE(child_process.IsValid());
  // Don't wait for the child process to exit.
}

// Validates that a test that leaks a child process causes its batch to exit
// with an error code.
TEST_F(UnitTestLauncherDelegateTester, LeakedChildProcess) {
  CommandLine command_line(CommandLine::ForCurrentProcess()->GetProgram());
  command_line.AppendSwitchASCII(
      "gtest_filter", "LeakedChildProcessTest.DISABLED_LeakChildProcess");

  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath path = dir.GetPath().AppendASCII("SaveSummaryResult.json");
  command_line.AppendSwitchPath("test-launcher-summary-output", path);
  command_line.AppendSwitch("gtest_also_run_disabled_tests");
  command_line.AppendSwitchASCII("test-launcher-retry-limit", "0");

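  // Run the leaking test in a child test launcher and capture its exit code.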
  std::string output;
  int exit_code = 0;
  GetAppOutputWithExitCode(command_line, &output, &exit_code);

  // Validate that we actually ran a test.
  std::optional<Value::Dict> root = test_launcher_utils::ReadSummary(path);
  ASSERT_TRUE(root);

  Value::Dict* dict = root->FindDict("test_locations");
  ASSERT_TRUE(dict);
  EXPECT_EQ(1u, dict->size());

  EXPECT_TRUE(test_launcher_utils::ValidateTestLocations(
      *dict, "LeakedChildProcessTest"));

  // Validate that the leaked child caused the batch to error out.
  EXPECT_EQ(exit_code, 1);
}
#endif  // BUILDFLAG(IS_FUCHSIA)

// Validates that GetTestOutputSnippet extracts the correct output snippet
// for each test result.
TEST(TestLauncherTools, GetTestOutputSnippetTest) {
  const std::string output =
      "[ RUN      ] TestCase.FirstTest\n"
      "[       OK ] TestCase.FirstTest (0 ms)\n"
      "Post first test output\n"
      "[ RUN      ] TestCase.SecondTest\n"
      "[  FAILED  ] TestCase.SecondTest (0 ms)\n"
      "[ RUN      ] TestCase.ThirdTest\n"
      "[  SKIPPED ] TestCase.ThirdTest (0 ms)\n"
      "Post second test output";
  TestResult result;

  // Test the snippet of a successful test.
  result.full_name = "TestCase.FirstTest";
  result.status = TestResult::TEST_SUCCESS;
  EXPECT_EQ(GetTestOutputSnippet(result, output),
            "[ RUN      ] TestCase.FirstTest\n"
            "[       OK ] TestCase.FirstTest (0 ms)\n");

  // The snippet of a failure-on-exit test should include output emitted
  // after the test concluded, but not the output of subsequent tests.
  result.status = TestResult::TEST_FAILURE_ON_EXIT;
  EXPECT_EQ(GetTestOutputSnippet(result, output),
            "[ RUN      ] TestCase.FirstTest\n"
            "[       OK ] TestCase.FirstTest (0 ms)\n"
            "Post first test output\n");

  // Test the snippet of a failed test.
  result.full_name = "TestCase.SecondTest";
  result.status = TestResult::TEST_FAILURE;
  EXPECT_EQ(GetTestOutputSnippet(result, output),
            "[ RUN      ] TestCase.SecondTest\n"
            "[  FAILED  ] TestCase.SecondTest (0 ms)\n");

  // Test the snippet of a skipped test. Note that the status is SUCCESS
  // because the gtest XML format does not distinguish between SUCCESS and
  // SKIPPED.
  result.full_name = "TestCase.ThirdTest";
  result.status = TestResult::TEST_SUCCESS;
  EXPECT_EQ(GetTestOutputSnippet(result, output),
            "[ RUN      ] TestCase.ThirdTest\n"
            "[  SKIPPED ] TestCase.ThirdTest (0 ms)\n");
}

MATCHER(CheckTruncationPreservesMessage, "") {
  // Ensure the inserted message matches the expected pattern.
  constexpr char kExpected[] = R"(FATAL.*message\n)";
  EXPECT_THAT(arg, ::testing::ContainsRegex(kExpected));

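  // Build a snippet that buries the fatal message (|arg|) in padding on both
  // sides, giving the truncation low-value content to discard around it.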
  const std::string snippet =
      base::StrCat({"[ RUN      ] SampleTestSuite.SampleTestName\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n",
                    arg,
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"});

  // Strip the stack trace off the end of the message; keep only its first
  // line.
  size_t line_end_pos = arg.find("\n");
  std::string first_line = arg.substr(0, line_end_pos + 1);

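  // Truncating to 300 bytes must preserve the first line of the fatal
  // message while producing a result of exactly the requested length.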
  const std::string result = TruncateSnippetFocused(snippet, 300);
  // The first line of the fatal message must survive truncation. (A plain
  // `find(...) > 0` check would pass vacuously when find() returns npos.)
  EXPECT_NE(result.find(first_line), std::string::npos);
  EXPECT_EQ(result.length(), 300UL);
  return true;
}

void MatchesFatalMessagesTest() {
  // Different Chrome test suites use different settings for their logs.
  // E.g. unit tests may not show the process ID (as they are single-process),
  // whereas browser tests usually do (as they are multi-process). This
  // affects how log messages are formatted and hence how the log severity,
  // i.e. "FATAL", appears in the log message. We test the two extremes --
  // all items (process IDs, timestamps, etc.) present, and all absent. We
  // also test the presence/absence of an extra logging prefix.
  {
    // Process ID, thread ID, timestamp and tickcount.
    logging::SetLogItems(true, true, true, true);
    logging::SetLogPrefix(nullptr);
    EXPECT_DEATH_IF_SUPPORTED(LOG(FATAL) << "message",
                              CheckTruncationPreservesMessage());
  }
  {
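    // No process ID, thread ID, timestamp or tickcount.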
    logging::SetLogItems(false, false, false, false);
    logging::SetLogPrefix(nullptr);
    EXPECT_DEATH_IF_SUPPORTED(LOG(FATAL) << "message",
                              CheckTruncationPreservesMessage());
  }
  {
    // Process ID, thread ID, timestamp and tickcount.
    logging::SetLogItems(true, true, true, true);
    logging::SetLogPrefix("mylogprefix");
    EXPECT_DEATH_IF_SUPPORTED(LOG(FATAL) << "message",
                              CheckTruncationPreservesMessage());
  }
  {
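    // No process ID, thread ID, timestamp or tickcount.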
    logging::SetLogItems(false, false, false, false);
    logging::SetLogPrefix("mylogprefix");
    EXPECT_DEATH_IF_SUPPORTED(LOG(FATAL) << "message",
                              CheckTruncationPreservesMessage());
  }
}

// Validates that TruncateSnippetFocused correctly identifies fatal messages
// to retain during truncation.
TEST(TestLauncherTools, TruncateSnippetFocusedMatchesFatalMessagesTest) {
  logging::ScopedLoggingSettings scoped_logging_settings;
#if BUILDFLAG(IS_CHROMEOS_ASH)
  scoped_logging_settings.SetLogFormat(logging::LogFormat::LOG_FORMAT_SYSLOG);
#endif
  MatchesFatalMessagesTest();
}

#if BUILDFLAG(IS_CHROMEOS_ASH)
// Validates that TruncateSnippetFocused correctly identifies fatal messages
// to retain during truncation, for ChromeOS Ash.
TEST(TestLauncherTools, TruncateSnippetFocusedMatchesFatalMessagesCrosAshTest) {
  logging::ScopedLoggingSettings scoped_logging_settings;
  scoped_logging_settings.SetLogFormat(logging::LogFormat::LOG_FORMAT_CHROME);
  MatchesFatalMessagesTest();
}
#endif

// Validates that TruncateSnippetFocused truncates snippets correctly,
// regardless of whether fatal messages appear at the start, middle or end of
// the snippet.
TEST(TestLauncherTools, TruncateSnippetFocusedTest) {
  // Test where the FATAL message appears at the start of the log.
  const std::string snippet =
      "[ RUN      ] "
      "EndToEndTests/"
      "EndToEndTest.WebTransportSessionUnidirectionalStreamSentEarly/"
      "draft29_QBIC\n"
      "[26219:26368:FATAL:tls_handshaker.cc(293)] 1-RTT secret(s) not set "
      "yet.\n"
      "#0 0x55619ad1fcdb in backtrace "
      "/b/s/w/ir/cache/builder/src/third_party/llvm/compiler-rt/lib/asan/../"
      "sanitizer_common/sanitizer_common_interceptors.inc:4205:13\n"
      "#1 0x5561a6bdf519 in base::debug::CollectStackTrace(void**, unsigned "
      "long) ./../../base/debug/stack_trace_posix.cc:845:39\n"
      "#2 0x5561a69a1293 in StackTrace "
      "./../../base/debug/stack_trace.cc:200:12\n"
      "...\n";
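  // Here the full RUN banner and FATAL line survive truncation; the
  // surrounding content is elided with "<truncated (N bytes)>" markers.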
  const std::string result = TruncateSnippetFocused(snippet, 300);
  EXPECT_EQ(
      result,
      "[ RUN      ] EndToEndTests/EndToEndTest.WebTransportSessionUnidirection"
      "alStreamSentEarly/draft29_QBIC\n"
      "[26219:26368:FATAL:tls_handshaker.cc(293)] 1-RTT secret(s) not set "
      "yet.\n"
      "#0 0x55619ad1fcdb in backtrace /b/s/w/ir/cache/bui\n"
      "<truncated (358 bytes)>\n"
      "Trace ./../../base/debug/stack_trace.cc:200:12\n"
      "...\n");
  EXPECT_EQ(result.length(), 300UL);

  // Test where FATAL message appears in the middle of the log.
  const std::string snippet_two =
      "[ RUN      ] NetworkingPrivateApiTest.CreateSharedNetwork\n"
      "Padding log information added for testing purposes\n"
      "Padding log information added for testing purposes\n"
      "Padding log information added for testing purposes\n"
      "FATAL extensions_unittests[12666:12666]: [managed_network_configuration"
      "_handler_impl.cc(525)] Check failed: !guid_str && !guid_str->empty().\n"
      "#0 0x562f31dba779 base::debug::CollectStackTrace()\n"
      "#1 0x562f31cdf2a3 base::debug::StackTrace::StackTrace()\n"
      "#2 0x562f31cf4380 logging::LogMessage::~LogMessage()\n"
      "#3 0x562f31cf4d3e logging::LogMessage::~LogMessage()\n";
  const std::string result_two = TruncateSnippetFocused(snippet_two, 300);
  EXPECT_EQ(
      result_two,
      "[ RUN      ] NetworkingPriv\n"
      "<truncated (210 bytes)>\n"
      " added for testing purposes\n"
      "FATAL extensions_unittests[12666:12666]: [managed_network_configuration"
      "_handler_impl.cc(525)] Check failed: !guid_str && !guid_str->empty().\n"
      "#0 0x562f31dba779 base::deb\n"
      "<truncated (213 bytes)>\n"
      ":LogMessage::~LogMessage()\n");
  EXPECT_EQ(result_two.length(), 300UL);

  // Test where FATAL message appears at the end of the log.
  const std::string snippet_three =
      "[ RUN      ] All/PDFExtensionAccessibilityTreeDumpTest.Highlights/"
      "linux\n"
      "[6741:6741:0716/171816.818448:ERROR:power_monitor_device_source_stub.cc"
      "(11)] Not implemented reached in virtual bool base::PowerMonitorDevice"
      "Source::IsOnBatteryPower()\n"
      "[6741:6741:0716/171816.818912:INFO:content_main_runner_impl.cc(1082)]"
      " Chrome is running in full browser mode.\n"
      "libva error: va_getDriverName() failed with unknown libva error,driver"
      "_name=(null)\n"
      "[6741:6741:0716/171817.688633:FATAL:agent_scheduling_group_host.cc(290)"
      "] Check failed: message->routing_id() != MSG_ROUTING_CONTROL "
      "(2147483647 vs. 2147483647)\n";
  const std::string result_three = TruncateSnippetFocused(snippet_three, 300);
  EXPECT_EQ(
      result_three,
      "[ RUN      ] All/PDFExtensionAccessibilityTreeDumpTest.Hi\n"
      "<truncated (432 bytes)>\n"
      "Name() failed with unknown libva error,driver_name=(null)\n"
      "[6741:6741:0716/171817.688633:FATAL:agent_scheduling_group_host.cc(290)"
      "] Check failed: message->routing_id() != MSG_ROUTING_CONTROL "
      "(2147483647 vs. 2147483647)\n");
  EXPECT_EQ(result_three.length(), 300UL);

  // Test where FATAL message does not appear.
  const std::string snippet_four =
      "[ RUN      ] All/PassingTest/linux\n"
      "Padding log line 1 added for testing purposes\n"
      "Padding log line 2 added for testing purposes\n"
      "Padding log line 3 added for testing purposes\n"
      "Padding log line 4 added for testing purposes\n"
      "Padding log line 5 added for testing purposes\n"
      "Padding log line 6 added for testing purposes\n";
  const std::string result_four = TruncateSnippetFocused(snippet_four, 300);
  EXPECT_EQ(result_four,
            "[ RUN      ] All/PassingTest/linux\n"
            "Padding log line 1 added for testing purposes\n"
            "Padding log line 2 added for testing purposes\n"
            "Padding lo\n<truncated (311 bytes)>\n"
            "Padding log line 4 added for testing purposes\n"
            "Padding log line 5 added for testing purposes\n"
            "Padding log line 6 added for testing purposes\n");
  EXPECT_EQ(result_four.length(), 300UL);
}

}  // namespace

}  // namespace base