#!/usr/bin/env python3
#
# Copyright 2018, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Unittests for result_reporter."""


from io import StringIO
import sys
import unittest
from unittest import mock
from unittest.mock import patch

from atest import arg_parser
from atest import atest_configs
from atest import result_reporter
from atest.test_finders import test_info
from atest.test_runners import test_runner_base


RESULT_PASSED_TEST = test_runner_base.TestResult(
    runner_name='someTestRunner',
    group_name='someTestModule',
    test_name='someClassName#sostName',
    status=test_runner_base.PASSED_STATUS,
    details=None,
    test_count=1,
    test_time='(10ms)',
    runner_total=None,
    group_total=2,
    additional_info={},
    test_run_name='com.android.UnitTests',
)

RESULT_PASSED_TEST_MODULE_2 = test_runner_base.TestResult(
    runner_name='someTestRunner',
    group_name='someTestModule2',
    test_name='someClassName#sostName',
    status=test_runner_base.PASSED_STATUS,
    details=None,
    test_count=1,
    test_time='(10ms)',
    runner_total=None,
    group_total=2,
    additional_info={},
    test_run_name='com.android.UnitTests',
)

RESULT_PASSED_TEST_RUNNER_2_NO_MODULE = test_runner_base.TestResult(
    runner_name='someTestRunner2',
    group_name=None,
    test_name='someClassName#sostName',
    status=test_runner_base.PASSED_STATUS,
    details=None,
    test_count=1,
    test_time='(10ms)',
    runner_total=None,
    group_total=2,
    additional_info={},
    test_run_name='com.android.UnitTests',
)

RESULT_FAILED_TEST = test_runner_base.TestResult(
    runner_name='someTestRunner',
    group_name='someTestModule',
    test_name='someClassName2#sestName2',
    status=test_runner_base.FAILED_STATUS,
    details='someTrace',
    test_count=1,
    test_time='',
    runner_total=None,
    group_total=2,
    additional_info={},
    test_run_name='com.android.UnitTests',
)

RESULT_RUN_FAILURE = test_runner_base.TestResult(
    runner_name='someTestRunner',
    group_name='someTestModule',
    test_name='someClassName#sostName',
    status=test_runner_base.ERROR_STATUS,
    details='someRunFailureReason',
    test_count=1,
    test_time='',
    runner_total=None,
    group_total=2,
    additional_info={},
    test_run_name='com.android.UnitTests',
)

RESULT_INVOCATION_FAILURE = test_runner_base.TestResult(
    runner_name='someTestRunner',
    group_name=None,
    test_name=None,
    status=test_runner_base.ERROR_STATUS,
    details='someInvocationFailureReason',
    test_count=1,
    test_time='',
    runner_total=None,
    group_total=None,
    additional_info={},
    test_run_name='com.android.UnitTests',
)

RESULT_IGNORED_TEST = test_runner_base.TestResult(
    runner_name='someTestRunner',
    group_name='someTestModule',
    test_name='someClassName#sostName',
    status=test_runner_base.IGNORED_STATUS,
    details=None,
    test_count=1,
    test_time='(10ms)',
    runner_total=None,
    group_total=2,
    additional_info={},
    test_run_name='com.android.UnitTests',
)

RESULT_ASSUMPTION_FAILED_TEST = test_runner_base.TestResult(
    runner_name='someTestRunner',
    group_name='someTestModule',
    test_name='someClassName#sostName',
    status=test_runner_base.ASSUMPTION_FAILED,
    details=None,
    test_count=1,
    test_time='(10ms)',
    runner_total=None,
    group_total=2,
    additional_info={},
    test_run_name='com.android.UnitTests',
)

ADDITIONAL_INFO_PERF01_TEST01 = {
    'repetition_index': '0',
    'cpu_time': '10001.10001',
    'name': 'perfName01',
    'repetitions': '0',
    'run_type': 'iteration',
    'label': '2123',
    'threads': '1',
    'time_unit': 'ns',
    'iterations': '1001',
    'run_name': 'perfName01',
    'real_time': '11001.11001',
}

RESULT_PERF01_TEST01 = test_runner_base.TestResult(
    runner_name='someTestRunner',
    group_name='someTestModule',
    test_name='somePerfClass01#perfName01',
    status=test_runner_base.PASSED_STATUS,
    details=None,
    test_count=1,
    test_time='(10ms)',
    runner_total=None,
    group_total=2,
    additional_info=ADDITIONAL_INFO_PERF01_TEST01,
    test_run_name='com.android.UnitTests',
)

RESULT_PERF01_TEST02 = test_runner_base.TestResult(
    runner_name='someTestRunner',
    group_name='someTestModule',
    test_name='somePerfClass01#perfName02',
    status=test_runner_base.PASSED_STATUS,
    details=None,
    test_count=1,
    test_time='(10ms)',
    runner_total=None,
    group_total=2,
    additional_info={
        'repetition_index': '0',
        'cpu_time': '10002.10002',
        'name': 'perfName02',
        'repetitions': '0',
        'run_type': 'iteration',
        'label': '2123',
        'threads': '1',
        'time_unit': 'ns',
        'iterations': '1002',
        'run_name': 'perfName02',
        'real_time': '11002.11002',
    },
    test_run_name='com.android.UnitTests',
)

RESULT_PERF01_TEST03_NO_CPU_TIME = test_runner_base.TestResult(
    runner_name='someTestRunner',
    group_name='someTestModule',
    test_name='somePerfClass01#perfName03',
    status=test_runner_base.PASSED_STATUS,
    details=None,
    test_count=1,
    test_time='(10ms)',
    runner_total=None,
    group_total=2,
    additional_info={
        'repetition_index': '0',
        'name': 'perfName03',
        'repetitions': '0',
        'run_type': 'iteration',
        'label': '2123',
        'threads': '1',
        'time_unit': 'ns',
        'iterations': '1003',
        'run_name': 'perfName03',
        'real_time': '11003.11003',
    },
    test_run_name='com.android.UnitTests',
)

RESULT_PERF02_TEST01 = test_runner_base.TestResult(
    runner_name='someTestRunner',
    group_name='someTestModule',
    test_name='somePerfClass02#perfName11',
    status=test_runner_base.PASSED_STATUS,
    details=None,
    test_count=1,
    test_time='(10ms)',
    runner_total=None,
    group_total=2,
    additional_info={
        'repetition_index': '0',
        'cpu_time': '20001.20001',
        'name': 'perfName11',
        'repetitions': '0',
        'run_type': 'iteration',
        'label': '2123',
        'threads': '1',
        'time_unit': 'ns',
        'iterations': '2001',
        'run_name': 'perfName11',
        'real_time': '21001.21001',
    },
    test_run_name='com.android.UnitTests',
)


# pylint: disable=protected-access
# pylint: disable=invalid-name
class ResultReporterUnittests(unittest.TestCase):
  """Unit tests for result_reporter.py"""

  def setUp(self):
    self.rr = result_reporter.ResultReporter()

  def tearDown(self):
    mock.patch.stopall()

  @mock.patch.object(result_reporter.ResultReporter, '_print_group_title')
  @mock.patch.object(result_reporter.ResultReporter, '_update_stats')
  @mock.patch.object(result_reporter.ResultReporter, '_print_result')
  def test_process_test_result(self, mock_print, mock_update, mock_title):
    """Test process_test_result method."""
    # Passed Test
    self.assertTrue('someTestRunner' not in self.rr.runners)
    self.rr.process_test_result(RESULT_PASSED_TEST)
    self.assertTrue('someTestRunner' in self.rr.runners)
    group = self.rr.runners['someTestRunner'].get('someTestModule')
    self.assertIsNotNone(group)
    mock_title.assert_called_with(RESULT_PASSED_TEST)
    mock_update.assert_called_with(RESULT_PASSED_TEST, group)
    mock_print.assert_called_with(RESULT_PASSED_TEST)
    # Failed Test
    mock_title.reset_mock()
    self.rr.process_test_result(RESULT_FAILED_TEST)
    mock_title.assert_not_called()
    mock_update.assert_called_with(RESULT_FAILED_TEST, group)
    mock_print.assert_called_with(RESULT_FAILED_TEST)
    # Test with new Group
    mock_title.reset_mock()
    self.rr.process_test_result(RESULT_PASSED_TEST_MODULE_2)
    self.assertTrue('someTestModule2' in self.rr.runners['someTestRunner'])
    mock_title.assert_called_with(RESULT_PASSED_TEST_MODULE_2)
    # Test with new Runner
    mock_title.reset_mock()
    self.rr.process_test_result(RESULT_PASSED_TEST_RUNNER_2_NO_MODULE)
    self.assertTrue('someTestRunner2' in self.rr.runners)
    mock_title.assert_called_with(RESULT_PASSED_TEST_RUNNER_2_NO_MODULE)

  def test_print_result_run_name(self):
    """Test that _print_result prints the test run name."""
    try:
      rr = result_reporter.ResultReporter()
      capture_output = StringIO()
      sys.stdout = capture_output
      run_name = 'com.android.UnitTests'
      rr._print_result(
          test_runner_base.TestResult(
              runner_name='runner_name',
              group_name='someTestModule',
              test_name='someClassName#someTestName',
              status=test_runner_base.FAILED_STATUS,
              details='someTrace',
              test_count=2,
              test_time='(2h44m36.402s)',
              runner_total=None,
              group_total=2,
              additional_info={},
              test_run_name=run_name,
          )
      )
      # Make sure the run name is in the first line.
      capture_output_str = capture_output.getvalue().strip()
      self.assertTrue(run_name in capture_output_str.split('\n')[0])
      run_name2 = 'com.android.UnitTests2'
      capture_output = StringIO()
      sys.stdout = capture_output
      rr._print_result(
          test_runner_base.TestResult(
              runner_name='runner_name',
              group_name='someTestModule',
              test_name='someClassName#someTestName',
              status=test_runner_base.FAILED_STATUS,
              details='someTrace',
              test_count=2,
              test_time='(2h43m36.402s)',
              runner_total=None,
              group_total=2,
              additional_info={},
              test_run_name=run_name2,
          )
      )
      # Make sure the run name is in the first line.
      capture_output_str = capture_output.getvalue().strip()
      self.assertTrue(run_name2 in capture_output_str.split('\n')[0])
    finally:
      sys.stdout = sys.__stdout__

  def test_register_unsupported_runner(self):
    """Test register_unsupported_runner method."""
    self.rr.register_unsupported_runner('NotSupported')
    runner = self.rr.runners['NotSupported']
    self.assertIsNotNone(runner)
    self.assertEqual(runner, result_reporter.UNSUPPORTED_FLAG)

  def test_update_stats_passed(self):
    """Test _update_stats method."""
    # Passed Test
    group = result_reporter.RunStat()
    self.rr._update_stats(RESULT_PASSED_TEST, group)
    self.assertEqual(self.rr.run_stats.passed, 1)
    self.assertEqual(self.rr.run_stats.failed, 0)
    self.assertEqual(self.rr.run_stats.run_errors, False)
    self.assertEqual(self.rr.failed_tests, [])
    self.assertEqual(group.passed, 1)
    self.assertEqual(group.failed, 0)
    self.assertEqual(group.ignored, 0)
    self.assertEqual(group.run_errors, False)
    # Passed Test New Group
    group2 = result_reporter.RunStat()
    self.rr._update_stats(RESULT_PASSED_TEST_MODULE_2, group2)
    self.assertEqual(self.rr.run_stats.passed, 2)
    self.assertEqual(self.rr.run_stats.failed, 0)
    self.assertEqual(self.rr.run_stats.run_errors, False)
    self.assertEqual(self.rr.failed_tests, [])
    self.assertEqual(group2.passed, 1)
    self.assertEqual(group2.failed, 0)
    self.assertEqual(group.ignored, 0)
    self.assertEqual(group2.run_errors, False)

  def test_update_stats_failed(self):
    """Test _update_stats method."""
    # Passed Test
    group = result_reporter.RunStat()
    self.rr._update_stats(RESULT_PASSED_TEST, group)
    # Passed Test New Group
    group2 = result_reporter.RunStat()
    self.rr._update_stats(RESULT_PASSED_TEST_MODULE_2, group2)
    # Failed Test Old Group
    self.rr._update_stats(RESULT_FAILED_TEST, group)
    self.assertEqual(self.rr.run_stats.passed, 2)
    self.assertEqual(self.rr.run_stats.failed, 1)
    self.assertEqual(self.rr.run_stats.run_errors, False)
    self.assertEqual(self.rr.failed_tests, [RESULT_FAILED_TEST.test_name])
    self.assertEqual(group.passed, 1)
    self.assertEqual(group.failed, 1)
    self.assertEqual(group.ignored, 0)
    self.assertEqual(group.total, 2)
    self.assertEqual(group2.total, 1)
    self.assertEqual(group.run_errors, False)
    # Test Run Failure
    self.rr._update_stats(RESULT_RUN_FAILURE, group)
    self.assertEqual(self.rr.run_stats.passed, 2)
    self.assertEqual(self.rr.run_stats.failed, 1)
    self.assertEqual(self.rr.run_stats.run_errors, True)
    self.assertEqual(self.rr.failed_tests, [RESULT_FAILED_TEST.test_name])
    self.assertEqual(group.passed, 1)
    self.assertEqual(group.failed, 1)
    self.assertEqual(group.ignored, 0)
    self.assertEqual(group.run_errors, True)
    self.assertEqual(group2.run_errors, False)
    # Invocation Failure
    self.rr._update_stats(RESULT_INVOCATION_FAILURE, group)
    self.assertEqual(self.rr.run_stats.passed, 2)
    self.assertEqual(self.rr.run_stats.failed, 1)
    self.assertEqual(self.rr.run_stats.run_errors, True)
    self.assertEqual(self.rr.failed_tests, [RESULT_FAILED_TEST.test_name])
    self.assertEqual(group.passed, 1)
    self.assertEqual(group.failed, 1)
    self.assertEqual(group.ignored, 0)
    self.assertEqual(group.run_errors, True)

  def test_update_stats_ignored_and_assumption_failure(self):
    """Test _update_stats method."""
    # Passed Test
    group = result_reporter.RunStat()
    self.rr._update_stats(RESULT_PASSED_TEST, group)
    # Passed Test New Group
    group2 = result_reporter.RunStat()
    self.rr._update_stats(RESULT_PASSED_TEST_MODULE_2, group2)
    # Failed Test Old Group
    self.rr._update_stats(RESULT_FAILED_TEST, group)
    # Test Run Failure
    self.rr._update_stats(RESULT_RUN_FAILURE, group)
    # Invocation Failure
    self.rr._update_stats(RESULT_INVOCATION_FAILURE, group)
    # Ignored Test
    self.rr._update_stats(RESULT_IGNORED_TEST, group)
    self.assertEqual(self.rr.run_stats.passed, 2)
    self.assertEqual(self.rr.run_stats.failed, 1)
    self.assertEqual(self.rr.run_stats.run_errors, True)
    self.assertEqual(self.rr.failed_tests, [RESULT_FAILED_TEST.test_name])
    self.assertEqual(group.passed, 1)
    self.assertEqual(group.failed, 1)
    self.assertEqual(group.ignored, 1)
    self.assertEqual(group.run_errors, True)
    # 2nd Ignored Test
    self.rr._update_stats(RESULT_IGNORED_TEST, group)
    self.assertEqual(self.rr.run_stats.passed, 2)
    self.assertEqual(self.rr.run_stats.failed, 1)
    self.assertEqual(self.rr.run_stats.run_errors, True)
    self.assertEqual(self.rr.failed_tests, [RESULT_FAILED_TEST.test_name])
    self.assertEqual(group.passed, 1)
    self.assertEqual(group.failed, 1)
    self.assertEqual(group.ignored, 2)
    self.assertEqual(group.run_errors, True)

    # Assumption_Failure test
    self.rr._update_stats(RESULT_ASSUMPTION_FAILED_TEST, group)
    self.assertEqual(group.assumption_failed, 1)
    # 2nd Assumption_Failure test
    self.rr._update_stats(RESULT_ASSUMPTION_FAILED_TEST, group)
    self.assertEqual(group.assumption_failed, 2)

  @patch.object(
      atest_configs,
      'GLOBAL_ARGS',
      arg_parser.create_atest_arg_parser().parse_args([]),
  )
  def test_print_summary_ret_val(self):
    """Test print_summary method's return value."""
    # PASS Case
    self.rr.process_test_result(RESULT_PASSED_TEST)
    self.assertEqual(0, self.rr.print_summary())
    # PASS Case + Fail Case
    self.rr.process_test_result(RESULT_FAILED_TEST)
    self.assertNotEqual(0, self.rr.print_summary())
    # PASS Case + Fail Case + PASS Case
    self.rr.process_test_result(RESULT_PASSED_TEST_MODULE_2)
    self.assertNotEqual(0, self.rr.print_summary())

  @patch.object(
      atest_configs,
      'GLOBAL_ARGS',
      arg_parser.create_atest_arg_parser().parse_args([]),
  )
  def test_print_summary_ret_val_err_stat(self):
    """Test print_summary method's return value."""
    # PASS Case
    self.rr.process_test_result(RESULT_PASSED_TEST)
    self.assertEqual(0, self.rr.print_summary())
    # PASS Case + Run Error Case
    self.rr.process_test_result(RESULT_RUN_FAILURE)
    self.assertNotEqual(0, self.rr.print_summary())
    # PASS Case + Run Error Case + PASS Case
    self.rr.process_test_result(RESULT_PASSED_TEST_MODULE_2)
    self.assertNotEqual(0, self.rr.print_summary())

  def test_collect_tests_only_no_throw(self):
    """Test that a collect-only reporter handles results without raising."""
    rr = result_reporter.ResultReporter(collect_only=True)
    rr.process_test_result(RESULT_PASSED_TEST)

    self.assertEqual(0, rr.print_collect_tests())

  def test_update_perf_info(self):
    """Test update_perf_info method."""
    group = result_reporter.RunStat()
    # 1. Test PerfInfo after RESULT_PERF01_TEST01
    # _update_stats() will call _update_perf_info()
    self.rr._update_stats(RESULT_PERF01_TEST01, group)
    correct_perf_info = []
    trim_perf01_test01 = {
        'repetition_index': '0',
        'cpu_time': '10001.10001',
        'name': 'perfName01',
        'repetitions': '0',
        'run_type': 'iteration',
        'label': '2123',
        'threads': '1',
        'time_unit': 'ns',
        'iterations': '1001',
        'run_name': 'perfName01',
        'real_time': '11001.11001',
        'test_name': 'somePerfClass01#perfName01',
    }
    correct_perf_info.append(trim_perf01_test01)
    self.assertEqual(self.rr.run_stats.perf_info.perf_info, correct_perf_info)
    # 2. Test PerfInfo after RESULT_PERF01_TEST02
    self.rr._update_stats(RESULT_PERF01_TEST02, group)
    trim_perf01_test02 = {
        'repetition_index': '0',
        'cpu_time': '10002.10002',
        'name': 'perfName02',
        'repetitions': '0',
        'run_type': 'iteration',
        'label': '2123',
        'threads': '1',
        'time_unit': 'ns',
        'iterations': '1002',
        'run_name': 'perfName02',
        'real_time': '11002.11002',
        'test_name': 'somePerfClass01#perfName02',
    }
    correct_perf_info.append(trim_perf01_test02)
    self.assertEqual(self.rr.run_stats.perf_info.perf_info, correct_perf_info)
    # 3. Test PerfInfo after RESULT_PERF02_TEST01
    self.rr._update_stats(RESULT_PERF02_TEST01, group)
    trim_perf02_test01 = {
        'repetition_index': '0',
        'cpu_time': '20001.20001',
        'name': 'perfName11',
        'repetitions': '0',
        'run_type': 'iteration',
        'label': '2123',
        'threads': '1',
        'time_unit': 'ns',
        'iterations': '2001',
        'run_name': 'perfName11',
        'real_time': '21001.21001',
        'test_name': 'somePerfClass02#perfName11',
    }
    correct_perf_info.append(trim_perf02_test01)
    self.assertEqual(self.rr.run_stats.perf_info.perf_info, correct_perf_info)
    # 4. Test PerfInfo after RESULT_PERF01_TEST03_NO_CPU_TIME
    self.rr._update_stats(RESULT_PERF01_TEST03_NO_CPU_TIME, group)
    # Nothing is added since RESULT_PERF01_TEST03_NO_CPU_TIME lacks cpu_time.
    self.assertEqual(self.rr.run_stats.perf_info.perf_info, correct_perf_info)

  def test_classify_perf_info(self):
    """Test _classify_perf_info method."""
    group = result_reporter.RunStat()
    self.rr._update_stats(RESULT_PERF01_TEST01, group)
    self.rr._update_stats(RESULT_PERF01_TEST02, group)
    self.rr._update_stats(RESULT_PERF02_TEST01, group)
    # Expected perf info entries; times are kept as reported (e.g. 10001.10001).
    trim_perf01_test01 = {
        'repetition_index': '0',
        'cpu_time': '10001.10001',
        'name': 'perfName01',
        'repetitions': '0',
        'run_type': 'iteration',
        'label': '2123',
        'threads': '1',
        'time_unit': 'ns',
        'iterations': '1001',
        'run_name': 'perfName01',
        'real_time': '11001.11001',
        'test_name': 'somePerfClass01#perfName01',
    }
    trim_perf01_test02 = {
        'repetition_index': '0',
        'cpu_time': '10002.10002',
        'name': 'perfName02',
        'repetitions': '0',
        'run_type': 'iteration',
        'label': '2123',
        'threads': '1',
        'time_unit': 'ns',
        'iterations': '1002',
        'run_name': 'perfName02',
        'real_time': '11002.11002',
        'test_name': 'somePerfClass01#perfName02',
    }
    trim_perf02_test01 = {
        'repetition_index': '0',
        'cpu_time': '20001.20001',
        'name': 'perfName11',
        'repetitions': '0',
        'run_type': 'iteration',
        'label': '2123',
        'threads': '1',
        'time_unit': 'ns',
        'iterations': '2001',
        'run_name': 'perfName11',
        'real_time': '21001.21001',
        'test_name': 'somePerfClass02#perfName11',
    }
    correct_classify_perf_info = {
        'somePerfClass01': [trim_perf01_test01, trim_perf01_test02],
        'somePerfClass02': [trim_perf02_test01],
    }
    classify_perf_info, max_len = (
        self.rr.run_stats.perf_info._classify_perf_info()
    )
    correct_max_len = {
        'real_time': 11,
        'cpu_time': 11,
        'name': 10,
        'iterations': 9,
        'time_unit': 2,
    }
    self.assertEqual(max_len, correct_max_len)
    self.assertEqual(classify_perf_info, correct_classify_perf_info)

  def test_print_perf_test_metrics_perf_tests_print_attempted(self):
    """Test that perf metrics printing is attempted for performance tests."""
    test_infos = [
        test_info.TestInfo(
            'some_module',
            'TestRunner',
            set(),
            compatibility_suites=['performance-tests'],
        )
    ]
    sut = result_reporter.ResultReporter(test_infos=test_infos)

    is_print_attempted = sut._print_perf_test_metrics()

    self.assertTrue(is_print_attempted)

  def test_print_perf_test_metrics_not_perf_tests_print_not_attempted(self):
    """Test that perf metrics printing is skipped for non-perf tests."""
    test_infos = [
        test_info.TestInfo(
            'some_module',
            'TestRunner',
            set(),
            compatibility_suites=['not-perf-test'],
        )
    ]
    sut = result_reporter.ResultReporter(test_infos=test_infos)

    is_print_attempted = sut._print_perf_test_metrics()

    self.assertFalse(is_print_attempted)


if __name__ == '__main__':
  unittest.main()