1#!/usr/bin/env python3 2# -*- coding: utf-8 -*- 3 4# Copyright 2011 The ChromiumOS Authors 5# Use of this source code is governed by a BSD-style license that can be 6# found in the LICENSE file. 7 8"""Module of result cache unittest.""" 9 10 11import io 12import os 13import pickle 14import re 15import shutil 16import tempfile 17import unittest 18import unittest.mock as mock 19 20from cros_utils import command_executer 21from cros_utils import logger 22from cros_utils import misc 23import image_checksummer 24from label import MockLabel 25import machine_manager 26from results_cache import CacheConditions 27from results_cache import PerfDataReadError 28from results_cache import PidVerificationError 29from results_cache import Result 30from results_cache import ResultsCache 31from results_cache import TelemetryResult 32import test_flag 33 34 35# The following hardcoded string has blocked words replaced, and thus 36# is not representative of a true crosperf output. 37# pylint: disable=line-too-long 38OUTPUT = """CMD (True): ./test_that.sh\ 39 --remote=172.17.128.241 --board=lumpy LibCBench 40CMD (None): cd /usr/local/google/home/yunlian/gd/src/build/images/lumpy/latest/../../../../..; cros_sdk -- ./in_chroot_cmd6X7Cxu.sh 41Identity added: /tmp/test_that.PO1234567/autotest_key (/tmp/test_that.PO1234567/autotest_key) 42INFO : Using emerged autotests already installed at /build/lumpy/usr/local/autotest. 43 44INFO : Running the following control files 1 times: 45INFO : * 'client/site_tests/platform_LibCBench/control' 46 47INFO : Running client test client/site_tests/platform_LibCBench/control 48./server/autoserv -m 172.17.128.241 --ssh-port 22 -c client/site_tests/platform_LibCBench/control -r /tmp/test_that.PO1234567/platform_LibCBench --test-retry=0 --args 49ERROR:root:import statsd failed, no stats will be reported. 5014:20:22 INFO | Results placed in /tmp/test_that.PO1234567/platform_LibCBench 5114:20:22 INFO | Processing control file 5214:20:23 INFO | Starting main ssh connection '/usr/bin/ssh -a -x -N -o ControlMain=yes -o ControlPath=/tmp/_autotmp_VIIP67ssh-main/socket -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes -o ConnectTimeout=30 -o ServerAliveInterval=180 -o ServerAliveCountMax=3 -o ConnectionAttempts=4 -o Protocol=2 -l root -p 22 172.17.128.241' 5314:20:23 ERROR| [stderr] Warning: Permanently added '172.17.128.241' (RSA) to the list of known hosts. 5414:20:23 INFO | INFO\t----\t----\tkernel=3.8.11\tlocaltime=May 22 14:20:23\ttimestamp=1369257623 5514:20:23 INFO | Installing autotest on 172.17.128.241 5614:20:23 INFO | Using installation dir /usr/local/autotest 5714:20:23 WARNI| No job_repo_url for <remote host: 172.17.128.241> 5814:20:23 INFO | Could not install autotest using the packaging system: No repos to install an autotest client from. Trying other methods 5914:20:23 INFO | Installation of autotest completed 6014:20:24 WARNI| No job_repo_url for <remote host: 172.17.128.241> 6114:20:24 INFO | Executing /usr/local/autotest/bin/autotest /usr/local/autotest/control phase 0 6214:20:24 INFO | Entered autotestd_monitor. 6314:20:24 INFO | Finished launching tail subprocesses. 6414:20:24 INFO | Finished waiting on autotestd to start. 
6514:20:26 INFO | START\t----\t----\ttimestamp=1369257625\tlocaltime=May 22 14:20:25 6614:20:26 INFO | \tSTART\tplatform_LibCBench\tplatform_LibCBench\ttimestamp=1369257625\tlocaltime=May 22 14:20:25 6714:20:30 INFO | \t\tGOOD\tplatform_LibCBench\tplatform_LibCBench\ttimestamp=1369257630\tlocaltime=May 22 14:20:30\tcompleted successfully 6814:20:30 INFO | \tEND GOOD\tplatform_LibCBench\tplatform_LibCBench\ttimestamp=1369257630\tlocaltime=May 22 14:20:30 6914:20:31 INFO | END GOOD\t----\t----\ttimestamp=1369257630\tlocaltime=May 22 14:20:30 7014:20:31 INFO | Got lock of exit_code_file. 7114:20:31 INFO | Released lock of exit_code_file and closed it. 72OUTPUT: ============================== 73OUTPUT: Current time: 2013-05-22 14:20:32.818831 Elapsed: 0:01:30 ETA: Unknown 74Done: 0% [ ] 75OUTPUT: Thread Status: 76RUNNING: 1 ('ttt: LibCBench (1)' 0:01:21) 77Machine Status: 78Machine Thread Lock Status Checksum 79172.17.128.241 ttt: LibCBench (1) True RUNNING 3ba9f2ecbb222f20887daea5583d86ba 80 81OUTPUT: ============================== 8214:20:33 INFO | Killing child processes. 8314:20:33 INFO | Client complete 8414:20:33 INFO | Finished processing control file 8514:20:33 INFO | Starting main ssh connection '/usr/bin/ssh -a -x -N -o ControlMain=yes -o ControlPath=/tmp/_autotmp_aVJUgmssh-main/socket -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes -o ConnectTimeout=30 -o ServerAliveInterval=180 -o ServerAliveCountMax=3 -o ConnectionAttempts=4 -o Protocol=2 -l root -p 22 172.17.128.241' 8614:20:33 ERROR| [stderr] Warning: Permanently added '172.17.128.241' (RSA) to the list of known hosts. 87 88INFO : Test results: 89------------------------------------------------------------------- 90platform_LibCBench [ PASSED ] 91platform_LibCBench/platform_LibCBench [ PASSED ] 92platform_LibCBench/platform_LibCBench b_malloc_big1__0_ 0.00375231466667 93platform_LibCBench/platform_LibCBench b_malloc_big2__0_ 0.002951359 94platform_LibCBench/platform_LibCBench b_malloc_bubble__0_ 0.015066374 95platform_LibCBench/platform_LibCBench b_malloc_sparse__0_ 0.015053784 96platform_LibCBench/platform_LibCBench b_malloc_thread_local__0_ 0.01138439 97platform_LibCBench/platform_LibCBench b_malloc_thread_stress__0_ 0.0367894733333 98platform_LibCBench/platform_LibCBench b_malloc_tiny1__0_ 0.000768474333333 99platform_LibCBench/platform_LibCBench b_malloc_tiny2__0_ 0.000581407333333 100platform_LibCBench/platform_LibCBench b_pthread_create_serial1__0_ 0.0291785246667 101platform_LibCBench/platform_LibCBench b_pthread_createjoin_serial1__0_ 0.031907936 102platform_LibCBench/platform_LibCBench b_pthread_createjoin_serial2__0_ 0.043485347 103platform_LibCBench/platform_LibCBench b_pthread_uselesslock__0_ 0.0294113346667 104platform_LibCBench/platform_LibCBench b_regex_compile____a_b_c__d_b__ 0.00529833933333 105platform_LibCBench/platform_LibCBench b_regex_search____a_b_c__d_b__ 0.00165455066667 106platform_LibCBench/platform_LibCBench b_regex_search___a_25_b__ 0.0496191923333 107platform_LibCBench/platform_LibCBench b_stdio_putcgetc__0_ 0.100005711667 108platform_LibCBench/platform_LibCBench b_stdio_putcgetc_unlocked__0_ 0.0371443833333 109platform_LibCBench/platform_LibCBench b_string_memset__0_ 0.00275405066667 110platform_LibCBench/platform_LibCBench b_string_strchr__0_ 0.00456903 111platform_LibCBench/platform_LibCBench b_string_strlen__0_ 0.044893587 112platform_LibCBench/platform_LibCBench b_string_strstr___aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaac__ 0.118360778 
113platform_LibCBench/platform_LibCBench b_string_strstr___aaaaaaaaaaaaaaaaaaaaaaaaac__ 0.068957325 114platform_LibCBench/platform_LibCBench b_string_strstr___aaaaaaaaaaaaaacccccccccccc__ 0.0135694476667 115platform_LibCBench/platform_LibCBench b_string_strstr___abcdefghijklmnopqrstuvwxyz__ 0.0134553343333 116platform_LibCBench/platform_LibCBench b_string_strstr___azbycxdwevfugthsirjqkplomn__ 0.0133123556667 117platform_LibCBench/platform_LibCBench b_utf8_bigbuf__0_ 0.0473772253333 118platform_LibCBench/platform_LibCBench b_utf8_onebyone__0_ 0.130938538333 119------------------------------------------------------------------- 120Total PASS: 2/2 (100%) 121 122INFO : Elapsed time: 0m16s 123""" 124 125error = """ 126ERROR: Identity added: /tmp/test_that.Z4Ld/autotest_key (/tmp/test_that.Z4Ld/autotest_key) 127INFO : Using emerged autotests already installed at /build/lumpy/usr/local/autotest. 128INFO : Running the following control files 1 times: 129INFO : * 'client/site_tests/platform_LibCBench/control' 130INFO : Running client test client/site_tests/platform_LibCBench/control 131INFO : Test results: 132INFO : Elapsed time: 0m18s 133""" 134 135keyvals = { 136 "": "PASS", 137 "b_stdio_putcgetc__0_": "0.100005711667", 138 "b_string_strstr___azbycxdwevfugthsirjqkplomn__": "0.0133123556667", 139 "b_malloc_thread_local__0_": "0.01138439", 140 "b_string_strlen__0_": "0.044893587", 141 "b_malloc_sparse__0_": "0.015053784", 142 "b_string_memset__0_": "0.00275405066667", 143 "platform_LibCBench": "PASS", 144 "b_pthread_uselesslock__0_": "0.0294113346667", 145 "b_string_strchr__0_": "0.00456903", 146 "b_pthread_create_serial1__0_": "0.0291785246667", 147 "b_string_strstr___aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaac__": "0.118360778", 148 "b_string_strstr___aaaaaaaaaaaaaacccccccccccc__": "0.0135694476667", 149 "b_pthread_createjoin_serial1__0_": "0.031907936", 150 "b_malloc_thread_stress__0_": "0.0367894733333", 151 "b_regex_search____a_b_c__d_b__": "0.00165455066667", 152 "b_malloc_bubble__0_": "0.015066374", 153 "b_malloc_big2__0_": "0.002951359", 154 "b_stdio_putcgetc_unlocked__0_": "0.0371443833333", 155 "b_pthread_createjoin_serial2__0_": "0.043485347", 156 "b_regex_search___a_25_b__": "0.0496191923333", 157 "b_utf8_bigbuf__0_": "0.0473772253333", 158 "b_malloc_big1__0_": "0.00375231466667", 159 "b_regex_compile____a_b_c__d_b__": "0.00529833933333", 160 "b_string_strstr___aaaaaaaaaaaaaaaaaaaaaaaaac__": "0.068957325", 161 "b_malloc_tiny2__0_": "0.000581407333333", 162 "b_utf8_onebyone__0_": "0.130938538333", 163 "b_malloc_tiny1__0_": "0.000768474333333", 164 "b_string_strstr___abcdefghijklmnopqrstuvwxyz__": "0.0134553343333", 165} 166 167PERF_DATA_HEADER = """ 168# ======== 169# captured on : Thu Jan 01 00:00:00 1980 170# header version : 1 171# data offset : 536 172# data size : 737678672 173# feat offset : 737679208 174# hostname : localhost 175# os release : 5.4.61 176# perf version : 177# arch : aarch64 178# nrcpus online : 8 179# nrcpus avail : 8 180# total memory : 5911496 kB 181# cmdline : /usr/bin/perf record -e instructions -p {pid} 182# event : name = instructions, , id = ( 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193 ), type = 8, size = 112 183# event : name = placeholder:u, , id = ( 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204 ), type = 1, size = 112, config = 0x9 184# CPU_TOPOLOGY info available, use -I to display 185# pmu mappings: software = 1, uprobe = 6, cs_etm = 8, breakpoint = 5, tracepoint = 2, armv8_pmuv3 = 7 186# contains AUX area data (e.g. 
instruction trace) 187# time of first sample : 0.000000 188# time of last sample : 0.000000 189# sample duration : 0.000 ms 190# missing features: TRACING_DATA CPUDESC CPUID NUMA_TOPOLOGY BRANCH_STACK GROUP_DESC STAT CACHE MEM_TOPOLOGY CLOCKID DIR_FORMAT 191# ======== 192# 193""" 194 195TURBOSTAT_LOG_OUTPUT = """CPU Avg_MHz Busy% Bzy_MHz TSC_MHz IRQ CoreTmp 196- 329 12.13 2723 2393 10975 77 1970 336 12.41 2715 2393 6328 77 1982 323 11.86 2731 2393 4647 69 199CPU Avg_MHz Busy% Bzy_MHz TSC_MHz IRQ CoreTmp 200- 1940 67.46 2884 2393 39920 83 2010 1827 63.70 2877 2393 21184 83 2022 2053 71.22 2891 2393 18736 67 203CPU Avg_MHz Busy% Bzy_MHz TSC_MHz IRQ CoreTmp 204- 1927 66.02 2927 2393 48946 84 2050 1880 64.47 2925 2393 24457 84 2062 1973 67.57 2928 2393 24489 69 207CPU Avg_MHz Busy% Bzy_MHz TSC_MHz IRQ CoreTmp 208- 1899 64.84 2937 2393 42540 72 2090 2135 72.82 2940 2393 23615 65 2102 1663 56.85 2934 2393 18925 72 211CPU Avg_MHz Busy% Bzy_MHz TSC_MHz IRQ CoreTmp 212- 1908 65.24 2932 2393 43172 75 2130 1876 64.25 2928 2393 20743 75 2142 1939 66.24 2936 2393 22429 69 215CPU Avg_MHz Busy% Bzy_MHz TSC_MHz IRQ CoreTmp 216- 1553 53.12 2933 2393 35488 46 2170 1484 50.80 2929 2393 18246 46 2182 1623 55.44 2936 2393 17242 45 219CPU Avg_MHz Busy% Bzy_MHz TSC_MHz IRQ CoreTmp 220- 843 29.83 2832 2393 28161 47 2210 827 29.35 2826 2393 16093 47 2222 858 30.31 2838 2393 12068 46 223""" 224TURBOSTAT_DATA = { 225 "cpufreq": {"all": [2723, 2884, 2927, 2937, 2932, 2933, 2832]}, 226 "cputemp": {"all": [77, 83, 84, 72, 75, 46, 47]}, 227} 228 229TOP_LOG = """ 230 PID USER PR NI VIRT RES SHR S %CPU %MEM TIME+ COMMAND 231 4102 chronos 12 -8 3454472 238300 118188 R 41.8 6.1 0:08.37 chrome 232 4204 chronos 12 -8 2492716 205728 179016 S 11.8 5.3 0:03.89 chrome 233 4890 root 20 0 3396 2064 1596 R 11.8 0.1 0:00.03 top 234 375 root 0 -20 0 0 0 S 5.9 0.0 0:00.17 kworker/u13 235 617 syslog 20 0 25332 8372 7888 S 5.9 0.2 0:00.77 sys-journal 236 237 PID USER PR NI VIRT RES SHR S %CPU %MEM TIME+ COMMAND 238 5745 chronos 20 0 5438580 139328 67988 R 122.8 3.6 0:04.26 chrome 239 912 root -51 0 0 0 0 S 2.0 0.0 0:01.04 irq/cros-ec 240 121 root 20 0 0 0 0 S 1.0 0.0 0:00.45 spi5 241 4811 root 20 0 6808 4084 3492 S 1.0 0.1 0:00.02 sshd 242 4890 root 20 0 3364 2148 1596 R 1.0 0.1 0:00.36 top 243 5205 chronos 12 -8 3673780 240928 130864 S 1.0 6.2 0:07.30 chrome 244 245 246 PID USER PR NI VIRT RES SHR S %CPU %MEM TIME+ COMMAND 247 5745 chronos 20 0 5434484 139432 63892 R 107.9 3.6 0:05.35 chrome 248 5713 chronos 20 0 5178652 103120 50372 S 17.8 2.6 0:01.13 chrome 249 7 root 20 0 0 0 0 S 1.0 0.0 0:00.73 rcu_preempt 250 855 root 20 0 0 0 0 S 1.0 0.0 0:00.01 kworker/4:2 251""" 252TOP_DATA = [ 253 { 254 "cmd": "chrome-5745", 255 "cpu_use_avg": 115.35, 256 "count": 2, 257 "top5_cpu_use": [122.8, 107.9], 258 }, 259 { 260 "cmd": "chrome-5713", 261 "cpu_use_avg": 8.9, 262 "count": 1, 263 "top5_cpu_use": [17.8], 264 }, 265 { 266 "cmd": "irq/cros-ec-912", 267 "cpu_use_avg": 1.0, 268 "count": 1, 269 "top5_cpu_use": [2.0], 270 }, 271 { 272 "cmd": "chrome-5205", 273 "cpu_use_avg": 0.5, 274 "count": 1, 275 "top5_cpu_use": [1.0], 276 }, 277 { 278 "cmd": "spi5-121", 279 "cpu_use_avg": 0.5, 280 "count": 1, 281 "top5_cpu_use": [1.0], 282 }, 283 { 284 "cmd": "sshd-4811", 285 "cpu_use_avg": 0.5, 286 "count": 1, 287 "top5_cpu_use": [1.0], 288 }, 289 { 290 "cmd": "rcu_preempt-7", 291 "cpu_use_avg": 0.5, 292 "count": 1, 293 "top5_cpu_use": [1.0], 294 }, 295 { 296 "cmd": "kworker/4:2-855", 297 "cpu_use_avg": 0.5, 298 "count": 1, 299 "top5_cpu_use": [1.0], 300 }, 
301] 302TOP_OUTPUT = """ COMMAND AVG CPU% SEEN HIGHEST 5 303 chrome 128.250000 6 [122.8, 107.9, 17.8, 5.0, 2.0] 304 irq/230-cros-ec 1.000000 1 [2.0] 305 sshd 0.500000 1 [1.0] 306 irq/231-cros-ec 0.500000 1 [1.0] 307 spi5 0.500000 1 [1.0] 308 rcu_preempt 0.500000 1 [1.0] 309 kworker/4:2 0.500000 1 [1.0] 310""" 311 312CPUSTATS_UNIQ_OUTPUT = """ 313/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_cur_freq 1512000 314/sys/devices/system/cpu/cpu1/cpufreq/cpuinfo_cur_freq 1512000 315/sys/devices/system/cpu/cpu3/cpufreq/cpuinfo_cur_freq 2016000 316soc-thermal 44444 317little-cpu 41234 318big-cpu 51234 319/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_cur_freq 1500000 320/sys/devices/system/cpu/cpu1/cpufreq/cpuinfo_cur_freq 1600000 321/sys/devices/system/cpu/cpu3/cpufreq/cpuinfo_cur_freq 2012000 322soc-thermal 45456 323little-cpu 42555 324big-cpu 61724 325""" 326CPUSTATS_UNIQ_DATA = { 327 "cpufreq": { 328 "cpu0": [1512, 1500], 329 "cpu1": [1512, 1600], 330 "cpu3": [2016, 2012], 331 }, 332 "cputemp": { 333 "soc-thermal": [44.4, 45.5], 334 "little-cpu": [41.2, 42.6], 335 "big-cpu": [51.2, 61.7], 336 }, 337} 338CPUSTATS_DUPL_OUTPUT = """ 339/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_cur_freq 1512000 340/sys/devices/system/cpu/cpu1/cpufreq/cpuinfo_cur_freq 1512000 341/sys/devices/system/cpu/cpu2/cpufreq/cpuinfo_cur_freq 1512000 342/sys/devices/system/cpu/cpu3/cpufreq/cpuinfo_cur_freq 2016000 343/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_cur_freq 1500000 344/sys/devices/system/cpu/cpu1/cpufreq/cpuinfo_cur_freq 1500000 345/sys/devices/system/cpu/cpu2/cpufreq/cpuinfo_cur_freq 1500000 346/sys/devices/system/cpu/cpu3/cpufreq/cpuinfo_cur_freq 2016000 347/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_cur_freq 1614000 348/sys/devices/system/cpu/cpu1/cpufreq/cpuinfo_cur_freq 1614000 349/sys/devices/system/cpu/cpu2/cpufreq/cpuinfo_cur_freq 1614000 350/sys/devices/system/cpu/cpu3/cpufreq/cpuinfo_cur_freq 1982000 351""" 352CPUSTATS_DUPL_DATA = { 353 "cpufreq": {"cpu0": [1512, 1500, 1614], "cpu3": [2016, 2016, 1982]}, 354} 355 356TMP_DIR1 = "/tmp/tmpAbcXyz" 357 358HISTOGRAMSET = """ 359[ 360 { 361 "values": [ 362 "cache_temperature_cold", 363 "typical", 364 "cache_temperature:cold" 365 ], 366 "guid": "db6d463b-7c07-4873-b839-db0652ccb97e", 367 "type": "GenericSet" 368 }, 369 { 370 "values": [ 371 "cache_temperature_warm", 372 "typical", 373 "cache_temperature:warm" 374 ], 375 "guid": "a270eb9d-3bb0-472a-951d-74ac3398b718", 376 "type": "GenericSet" 377 }, 378 { 379 "sampleValues": [ 380 1111.672 381 ], 382 "name": "timeToFirstContentfulPaint", 383 "diagnostics": { 384 "storyTags": "a270eb9d-3bb0-472a-951d-74ac3398b718" 385 }, 386 "unit": "ms_smallerIsBetter" 387 }, 388 { 389 "sampleValues": [ 390 1146.459 391 ], 392 "name": "timeToFirstContentfulPaint", 393 "diagnostics": { 394 "storyTags": "db6d463b-7c07-4873-b839-db0652ccb97e" 395 }, 396 "unit": "ms_smallerIsBetter" 397 }, 398 { 399 "sampleValues": [ 400 888.328 401 ], 402 "name": "timeToFirstContentfulPaint", 403 "diagnostics": { 404 "storyTags": "a270eb9d-3bb0-472a-951d-74ac3398b718" 405 }, 406 "unit": "ms_smallerIsBetter" 407 }, 408 { 409 "sampleValues": [ 410 853.541 411 ], 412 "name": "timeToFirstContentfulPaint", 413 "diagnostics": { 414 "storyTags": "db6d463b-7c07-4873-b839-db0652ccb97e" 415 }, 416 "unit": "ms_smallerIsBetter" 417 }, 418 { 419 "sampleValues": [ 420 400.000 421 ], 422 "name": "timeToFirstContentfulPaint", 423 "diagnostics": { 424 "storyTags": "a270eb9d-3bb0-472a-951d-74ac3398b718" 425 }, 426 "unit": "ms_smallerIsBetter" 427 } 428 429] 430""" 
431 432# pylint: enable=line-too-long 433 434 435class MockResult(Result): 436 """Mock result class.""" 437 438 def __init__(self, mylogger, label, logging_level, machine): 439 super(MockResult, self).__init__( 440 mylogger, label, logging_level, machine 441 ) 442 443 def FindFilesInResultsDir(self, find_args): 444 return "" 445 446 # pylint: disable=arguments-differ 447 def GetKeyvals(self, temp=False): 448 if temp: 449 pass 450 return keyvals 451 452 453class RegexMatcher: 454 """A regex matcher, for passing to mocks.""" 455 456 def __init__(self, regex): 457 self._regex = re.compile(regex) 458 459 def __eq__(self, string): 460 return self._regex.search(string) is not None 461 462 463class ResultTest(unittest.TestCase): 464 """Result test class.""" 465 466 def __init__(self, *args, **kwargs): 467 super(ResultTest, self).__init__(*args, **kwargs) 468 self.callFakeProcessResults = False 469 self.fakeCacheReturnResult = None 470 self.callGetResultsDir = False 471 self.callProcessResults = False 472 self.callGetPerfReportFiles = False 473 self.kv_dict = None 474 self.tmpdir = "" 475 self.callGetNewKeyvals = False 476 self.callGetResultsFile = False 477 self.callGetPerfDataFiles = False 478 self.callGetTurbostatFile = False 479 self.callGetCpustatsFile = False 480 self.callGetTopFile = False 481 self.callGetCpuinfoFile = False 482 self.callGetWaitTimeFile = False 483 self.args = None 484 self.callGatherPerfResults = False 485 self.mock_logger = mock.Mock(spec=logger.Logger) 486 self.mock_cmd_exec = mock.Mock(spec=command_executer.CommandExecuter) 487 self.mock_label = MockLabel( 488 "mock_label", 489 "build", 490 "chromeos_image", 491 "autotest_dir", 492 "debug_dir", 493 "/tmp", 494 "lumpy", 495 "remote", 496 "image_args", 497 "cache_dir", 498 "average", 499 "gcc", 500 False, 501 None, 502 ) 503 504 @mock.patch.object(os.path, "exists") 505 def testCreateFromRun(self, mock_path_exists): 506 mock_path_exists.side_effect = lambda x: x != "/etc/cros_chroot_version" 507 result = MockResult.CreateFromRun( 508 logger.GetLogger(), 509 "average", 510 self.mock_label, 511 "remote1", 512 OUTPUT, 513 error, 514 0, 515 True, 516 ) 517 self.assertEqual(result.keyvals, keyvals) 518 self.assertEqual( 519 result.chroot_results_dir, 520 "/tmp/test_that.PO1234567/platform_LibCBench", 521 ) 522 self.assertEqual( 523 result.results_dir, 524 RegexMatcher("/tmp/.*tmp/test_that.PO1234567/platform_LibCBench"), 525 ) 526 self.assertEqual(result.retval, 0) 527 528 def setUp(self): 529 self.result = Result( 530 self.mock_logger, self.mock_label, "average", self.mock_cmd_exec 531 ) 532 self.result.chromeos_root = "/tmp/chromeos" 533 self.orig_exists = os.path.exists 534 535 @mock.patch.object(os.path, "isdir") 536 @mock.patch.object(command_executer.CommandExecuter, "RunCommand") 537 @mock.patch.object(command_executer.CommandExecuter, "CopyFiles") 538 def test_copy_files_to(self, mock_copyfiles, mock_runcmd, mock_isdir): 539 files = ["src_file_1", "src_file_2", "src_file_3"] 540 dest_dir = "/tmp/test" 541 self.mock_cmd_exec.RunCommand = mock_runcmd 542 self.mock_cmd_exec.CopyFiles = mock_copyfiles 543 544 mock_copyfiles.return_value = 0 545 546 # test 1. dest_dir exists; CopyFiles returns 0. 
547 mock_isdir.return_value = True 548 self.result.CopyFilesTo(dest_dir, files) 549 self.assertEqual(mock_runcmd.call_count, 0) 550 self.assertEqual(mock_copyfiles.call_count, 3) 551 first_args = mock_copyfiles.call_args_list[0][0] 552 second_args = mock_copyfiles.call_args_list[1][0] 553 third_args = mock_copyfiles.call_args_list[2][0] 554 self.assertEqual(first_args, ("src_file_1", "/tmp/test/src_file_1.0")) 555 self.assertEqual(second_args, ("src_file_2", "/tmp/test/src_file_2.1")) 556 self.assertEqual(third_args, ("src_file_3", "/tmp/test/src_file_3.2")) 557 558 mock_runcmd.reset_mock() 559 mock_copyfiles.reset_mock() 560 # test 2. dest_dir does not exist; CopyFiles returns 0. 561 mock_isdir.return_value = False 562 self.result.CopyFilesTo(dest_dir, files) 563 self.assertEqual(mock_runcmd.call_count, 3) 564 self.assertEqual(mock_copyfiles.call_count, 3) 565 self.assertEqual( 566 mock_runcmd.call_args_list[0], mock_runcmd.call_args_list[1] 567 ) 568 self.assertEqual( 569 mock_runcmd.call_args_list[0], mock_runcmd.call_args_list[2] 570 ) 571 self.assertEqual( 572 mock_runcmd.call_args_list[0][0], ("mkdir -p /tmp/test",) 573 ) 574 575 # test 3. CopyFiles returns 1 (fails). 576 mock_copyfiles.return_value = 1 577 self.assertRaises(Exception, self.result.CopyFilesTo, dest_dir, files) 578 579 @mock.patch.object(Result, "CopyFilesTo") 580 def test_copy_results_to(self, mockCopyFilesTo): 581 results_file = [ 582 "/tmp/result.json.0", 583 "/tmp/result.json.1", 584 "/tmp/result.json.2", 585 ] 586 perf_data_files = [ 587 "/tmp/perf.data.0", 588 "/tmp/perf.data.1", 589 "/tmp/perf.data.2", 590 ] 591 perf_report_files = [ 592 "/tmp/perf.report.0", 593 "/tmp/perf.report.1", 594 "/tmp/perf.report.2", 595 ] 596 597 self.result.results_file = results_file 598 self.result.perf_data_files = perf_data_files 599 self.result.perf_report_files = perf_report_files 600 601 self.result.CopyFilesTo = mockCopyFilesTo 602 self.result.CopyResultsTo("/tmp/results/") 603 self.assertEqual(mockCopyFilesTo.call_count, 3) 604 self.assertEqual(len(mockCopyFilesTo.call_args_list), 3) 605 self.assertEqual( 606 mockCopyFilesTo.call_args_list[0][0], 607 ("/tmp/results/", results_file), 608 ) 609 self.assertEqual( 610 mockCopyFilesTo.call_args_list[1][0], 611 ("/tmp/results/", perf_data_files), 612 ) 613 self.assertEqual( 614 mockCopyFilesTo.call_args_list[2][0], 615 ("/tmp/results/", perf_report_files), 616 ) 617 618 def test_get_new_keyvals(self): 619 kv_dict = {} 620 621 def FakeGetDataMeasurementsFiles(): 622 filename = os.path.join(os.getcwd(), "unittest_keyval_file.txt") 623 return [filename] 624 625 self.result.GetDataMeasurementsFiles = FakeGetDataMeasurementsFiles 626 kv_dict2, udict = self.result.GetNewKeyvals(kv_dict) 627 self.assertEqual( 628 kv_dict2, 629 { 630 "Box2D__Box2D": 4775, 631 "Mandreel__Mandreel": 6620, 632 "Gameboy__Gameboy": 9901, 633 "Crypto__Crypto": 8737, 634 "telemetry_page_measurement_results__num_errored": 0, 635 "telemetry_page_measurement_results__num_failed": 0, 636 "PdfJS__PdfJS": 6455, 637 "Total__Score": 7918, 638 "EarleyBoyer__EarleyBoyer": 14340, 639 "MandreelLatency__MandreelLatency": 5188, 640 "CodeLoad__CodeLoad": 6271, 641 "DeltaBlue__DeltaBlue": 14401, 642 "Typescript__Typescript": 9815, 643 "SplayLatency__SplayLatency": 7653, 644 "zlib__zlib": 16094, 645 "Richards__Richards": 10358, 646 "RegExp__RegExp": 1765, 647 "NavierStokes__NavierStokes": 9815, 648 "Splay__Splay": 4425, 649 "RayTrace__RayTrace": 16600, 650 }, 651 ) 652 self.assertEqual( 653 udict, 654 { 655 "Box2D__Box2D": 
"score", 656 "Mandreel__Mandreel": "score", 657 "Gameboy__Gameboy": "score", 658 "Crypto__Crypto": "score", 659 "telemetry_page_measurement_results__num_errored": "count", 660 "telemetry_page_measurement_results__num_failed": "count", 661 "PdfJS__PdfJS": "score", 662 "Total__Score": "score", 663 "EarleyBoyer__EarleyBoyer": "score", 664 "MandreelLatency__MandreelLatency": "score", 665 "CodeLoad__CodeLoad": "score", 666 "DeltaBlue__DeltaBlue": "score", 667 "Typescript__Typescript": "score", 668 "SplayLatency__SplayLatency": "score", 669 "zlib__zlib": "score", 670 "Richards__Richards": "score", 671 "RegExp__RegExp": "score", 672 "NavierStokes__NavierStokes": "score", 673 "Splay__Splay": "score", 674 "RayTrace__RayTrace": "score", 675 }, 676 ) 677 678 def test_append_telemetry_units(self): 679 kv_dict = { 680 "Box2D__Box2D": 4775, 681 "Mandreel__Mandreel": 6620, 682 "Gameboy__Gameboy": 9901, 683 "Crypto__Crypto": 8737, 684 "PdfJS__PdfJS": 6455, 685 "Total__Score": 7918, 686 "EarleyBoyer__EarleyBoyer": 14340, 687 "MandreelLatency__MandreelLatency": 5188, 688 "CodeLoad__CodeLoad": 6271, 689 "DeltaBlue__DeltaBlue": 14401, 690 "Typescript__Typescript": 9815, 691 "SplayLatency__SplayLatency": 7653, 692 "zlib__zlib": 16094, 693 "Richards__Richards": 10358, 694 "RegExp__RegExp": 1765, 695 "NavierStokes__NavierStokes": 9815, 696 "Splay__Splay": 4425, 697 "RayTrace__RayTrace": 16600, 698 } 699 units_dict = { 700 "Box2D__Box2D": "score", 701 "Mandreel__Mandreel": "score", 702 "Gameboy__Gameboy": "score", 703 "Crypto__Crypto": "score", 704 "PdfJS__PdfJS": "score", 705 "Total__Score": "score", 706 "EarleyBoyer__EarleyBoyer": "score", 707 "MandreelLatency__MandreelLatency": "score", 708 "CodeLoad__CodeLoad": "score", 709 "DeltaBlue__DeltaBlue": "score", 710 "Typescript__Typescript": "score", 711 "SplayLatency__SplayLatency": "score", 712 "zlib__zlib": "score", 713 "Richards__Richards": "score", 714 "RegExp__RegExp": "score", 715 "NavierStokes__NavierStokes": "score", 716 "Splay__Splay": "score", 717 "RayTrace__RayTrace": "score", 718 } 719 720 results_dict = self.result.AppendTelemetryUnits(kv_dict, units_dict) 721 self.assertEqual( 722 results_dict, 723 { 724 "Box2D__Box2D": [4775, "score"], 725 "Splay__Splay": [4425, "score"], 726 "Gameboy__Gameboy": [9901, "score"], 727 "Crypto__Crypto": [8737, "score"], 728 "PdfJS__PdfJS": [6455, "score"], 729 "Total__Score": [7918, "score"], 730 "EarleyBoyer__EarleyBoyer": [14340, "score"], 731 "MandreelLatency__MandreelLatency": [5188, "score"], 732 "DeltaBlue__DeltaBlue": [14401, "score"], 733 "SplayLatency__SplayLatency": [7653, "score"], 734 "Mandreel__Mandreel": [6620, "score"], 735 "Richards__Richards": [10358, "score"], 736 "zlib__zlib": [16094, "score"], 737 "CodeLoad__CodeLoad": [6271, "score"], 738 "Typescript__Typescript": [9815, "score"], 739 "RegExp__RegExp": [1765, "score"], 740 "RayTrace__RayTrace": [16600, "score"], 741 "NavierStokes__NavierStokes": [9815, "score"], 742 }, 743 ) 744 745 @mock.patch.object(misc, "GetInsideChrootPath") 746 @mock.patch.object(tempfile, "mkdtemp") 747 @mock.patch.object(command_executer.CommandExecuter, "RunCommand") 748 @mock.patch.object( 749 command_executer.CommandExecuter, "ChrootRunCommandWOutput" 750 ) 751 def test_get_keyvals( 752 self, mock_chrootruncmd, mock_runcmd, mock_mkdtemp, mock_getpath 753 ): 754 self.kv_dict = {} 755 self.callGetNewKeyvals = False 756 757 def reset(): 758 self.kv_dict = {} 759 self.callGetNewKeyvals = False 760 mock_chrootruncmd.reset_mock() 761 mock_runcmd.reset_mock() 762 
mock_mkdtemp.reset_mock() 763 mock_getpath.reset_mock() 764 765 def FakeGetNewKeyvals(kv_dict): 766 self.kv_dict = kv_dict 767 self.callGetNewKeyvals = True 768 return_kvdict = {"first_time": 680, "Total": 10} 769 return_udict = {"first_time": "ms", "Total": "score"} 770 return return_kvdict, return_udict 771 772 mock_mkdtemp.return_value = TMP_DIR1 773 mock_chrootruncmd.return_value = [ 774 "", 775 ("%s,PASS\n%s/telemetry_Crosperf,PASS\n") % (TMP_DIR1, TMP_DIR1), 776 "", 777 ] 778 mock_getpath.return_value = TMP_DIR1 779 self.result.ce.ChrootRunCommandWOutput = mock_chrootruncmd 780 self.result.ce.RunCommand = mock_runcmd 781 self.result.GetNewKeyvals = FakeGetNewKeyvals 782 self.result.suite = "telemetry_Crosperf" 783 self.result.results_dir = "/tmp/test_that_resultsNmq" 784 785 # Test 1. no self.temp_dir. 786 res = self.result.GetKeyvals() 787 self.assertTrue(self.callGetNewKeyvals) 788 self.assertEqual( 789 self.kv_dict, {"": "PASS", "telemetry_Crosperf": "PASS"} 790 ) 791 self.assertEqual(mock_runcmd.call_count, 1) 792 self.assertEqual( 793 mock_runcmd.call_args_list[0][0], 794 ("cp -r /tmp/test_that_resultsNmq/* %s" % TMP_DIR1,), 795 ) 796 self.assertEqual(mock_chrootruncmd.call_count, 1) 797 self.assertEqual( 798 mock_chrootruncmd.call_args_list[0][0], 799 ( 800 self.result.chromeos_root, 801 ("./generate_test_report --no-color --csv %s") % TMP_DIR1, 802 ), 803 ) 804 self.assertEqual(mock_getpath.call_count, 1) 805 self.assertEqual(mock_mkdtemp.call_count, 1) 806 self.assertEqual( 807 res, {"Total": [10, "score"], "first_time": [680, "ms"]} 808 ) 809 810 # Test 2. self.temp_dir 811 reset() 812 mock_chrootruncmd.return_value = [ 813 "", 814 ( 815 "/tmp/tmpJCajRG,PASS\n/tmp/tmpJCajRG/" 816 "telemetry_Crosperf,PASS\n" 817 ), 818 "", 819 ] 820 mock_getpath.return_value = "/tmp/tmpJCajRG" 821 self.result.temp_dir = "/tmp/tmpJCajRG" 822 res = self.result.GetKeyvals() 823 self.assertEqual(mock_runcmd.call_count, 0) 824 self.assertEqual(mock_mkdtemp.call_count, 0) 825 self.assertEqual(mock_chrootruncmd.call_count, 1) 826 self.assertTrue(self.callGetNewKeyvals) 827 self.assertEqual( 828 self.kv_dict, {"": "PASS", "telemetry_Crosperf": "PASS"} 829 ) 830 self.assertEqual( 831 res, {"Total": [10, "score"], "first_time": [680, "ms"]} 832 ) 833 834 # Test 3. suite != telemetry_Crosperf. Normally this would be for 835 # running non-Telemetry autotests, such as BootPerfServer. In this test 836 # case, the keyvals we have set up were returned from a Telemetry test 837 # run; so this pass is basically testing that we don't append the units 838 # to the test results (which we do for Telemetry autotest runs). 839 reset() 840 self.result.suite = "" 841 res = self.result.GetKeyvals() 842 self.assertEqual(res, {"Total": 10, "first_time": 680}) 843 844 @mock.patch.object(misc, "GetInsideChrootPath") 845 @mock.patch.object( 846 command_executer.CommandExecuter, "ChrootRunCommandWOutput" 847 ) 848 @mock.patch.object(os.path, "exists") 849 def test_get_samples( 850 self, mock_exists, mock_get_total_samples, mock_getpath 851 ): 852 self.result.perf_data_files = ["/tmp/results/perf.data"] 853 self.result.board = "samus" 854 self.result.cwp_dso = "kallsyms" 855 mock_getpath.return_value = "/usr/chromeos/chroot/tmp/results/perf.data" 856 mock_get_total_samples.return_value = [ 857 "", 858 ( 859 "45.42% 53721 chrome \n" 860 "10.01% 12345 [kernel.kallsyms] \n" 861 "1.42% 1234 ssh " 862 ), 863 "", 864 ] 865 mock_exists.return_value = True 866 867 # mock_open does not seem to support iteration. 
868 # pylint: disable=line-too-long 869 content = """1.63% 66 dav1d-tile chrome [.] decode_coefs 870 1.48% 60 swapper [kernel.kallsyms] [k] intel_idle 871 1.16% 47 dav1d-tile chrome [.] decode_sb""" 872 873 with mock.patch("builtins.open", return_value=io.StringIO(content)): 874 samples = self.result.GetSamples() 875 self.assertEqual(samples, [12345 - 60, "samples"]) 876 877 def test_get_results_dir(self): 878 self.result.out = "" 879 self.assertRaises(Exception, self.result.GetResultsDir) 880 881 self.result.out = OUTPUT 882 resdir = self.result.GetResultsDir() 883 self.assertEqual(resdir, "/tmp/test_that.PO1234567/platform_LibCBench") 884 885 @mock.patch.object(command_executer.CommandExecuter, "RunCommandGeneric") 886 def test_find_files_in_results_dir(self, mock_runcmd): 887 self.result.results_dir = None 888 res = self.result.FindFilesInResultsDir("-name perf.data") 889 self.assertEqual(res, "") 890 891 self.result.ce.RunCommand = mock_runcmd 892 self.result.results_dir = "/tmp/test_results" 893 mock_runcmd.return_value = [0, "/tmp/test_results/perf.data", ""] 894 res = self.result.FindFilesInResultsDir("-name perf.data") 895 self.assertEqual(mock_runcmd.call_count, 1) 896 self.assertEqual( 897 mock_runcmd.call_args_list[0][0], 898 ("find /tmp/test_results -name perf.data",), 899 ) 900 self.assertEqual(res, "/tmp/test_results/perf.data") 901 902 mock_runcmd.reset_mock() 903 mock_runcmd.return_value = [1, "", ""] 904 self.assertRaises( 905 Exception, self.result.FindFilesInResultsDir, "-name perf.data" 906 ) 907 908 @mock.patch.object(Result, "FindFilesInResultsDir") 909 def test_get_perf_data_files(self, mock_findfiles): 910 self.args = None 911 912 mock_findfiles.return_value = "line1\nline1\n" 913 self.result.FindFilesInResultsDir = mock_findfiles 914 res = self.result.GetPerfDataFiles() 915 self.assertEqual(res, ["line1", "line1"]) 916 self.assertEqual( 917 mock_findfiles.call_args_list[0][0], ("-name perf.data",) 918 ) 919 920 def test_get_perf_report_files(self): 921 self.args = None 922 923 def FakeFindFiles(find_args): 924 self.args = find_args 925 return "line1\nline1\n" 926 927 self.result.FindFilesInResultsDir = FakeFindFiles 928 res = self.result.GetPerfReportFiles() 929 self.assertEqual(res, ["line1", "line1"]) 930 self.assertEqual(self.args, "-name perf.data.report") 931 932 def test_get_data_measurement_files(self): 933 self.args = None 934 935 def FakeFindFiles(find_args): 936 self.args = find_args 937 return "line1\nline1\n" 938 939 self.result.FindFilesInResultsDir = FakeFindFiles 940 res = self.result.GetDataMeasurementsFiles() 941 self.assertEqual(res, ["line1", "line1"]) 942 self.assertEqual(self.args, "-name perf_measurements") 943 944 @mock.patch.object(command_executer.CommandExecuter, "RunCommandWOutput") 945 def test_get_turbostat_file_finds_single_log(self, mock_runcmd): 946 """Expected behavior when a single log file found.""" 947 self.result.results_dir = "/tmp/test_results" 948 self.result.ce.RunCommandWOutput = mock_runcmd 949 mock_runcmd.return_value = (0, "some/long/path/turbostat.log", "") 950 found_single_log = self.result.GetTurbostatFile() 951 self.assertEqual(found_single_log, "some/long/path/turbostat.log") 952 953 @mock.patch.object(command_executer.CommandExecuter, "RunCommandWOutput") 954 def test_get_turbostat_file_finds_multiple_logs(self, mock_runcmd): 955 """Error case when multiple files found.""" 956 self.result.results_dir = "/tmp/test_results" 957 self.result.ce.RunCommandWOutput = mock_runcmd 958 mock_runcmd.return_value = ( 959 0, 960 
"some/long/path/turbostat.log\nturbostat.log", 961 "", 962 ) 963 found_first_logs = self.result.GetTurbostatFile() 964 self.assertEqual(found_first_logs, "some/long/path/turbostat.log") 965 966 @mock.patch.object(command_executer.CommandExecuter, "RunCommandWOutput") 967 def test_get_turbostat_file_finds_no_logs(self, mock_runcmd): 968 """Error case when no log file found.""" 969 self.result.results_dir = "/tmp/test_results" 970 self.result.ce.RunCommandWOutput = mock_runcmd 971 mock_runcmd.return_value = (0, "", "") 972 found_no_logs = self.result.GetTurbostatFile() 973 self.assertEqual(found_no_logs, "") 974 975 @mock.patch.object(command_executer.CommandExecuter, "RunCommandWOutput") 976 def test_get_turbostat_file_with_failing_find(self, mock_runcmd): 977 """Error case when file search returns an error.""" 978 self.result.results_dir = "/tmp/test_results" 979 mock_runcmd.return_value = (-1, "", "error") 980 with self.assertRaises(RuntimeError): 981 self.result.GetTurbostatFile() 982 983 @mock.patch.object(command_executer.CommandExecuter, "RunCommandWOutput") 984 def test_get_top_file_finds_single_log(self, mock_runcmd): 985 """Expected behavior when a single top log file found.""" 986 self.result.results_dir = "/tmp/test_results" 987 self.result.ce.RunCommandWOutput = mock_runcmd 988 mock_runcmd.return_value = (0, "some/long/path/top.log", "") 989 found_single_log = self.result.GetTopFile() 990 self.assertEqual(found_single_log, "some/long/path/top.log") 991 992 @mock.patch.object(command_executer.CommandExecuter, "RunCommandWOutput") 993 def test_get_top_file_finds_multiple_logs(self, mock_runcmd): 994 """The case when multiple top files found.""" 995 self.result.results_dir = "/tmp/test_results" 996 self.result.ce.RunCommandWOutput = mock_runcmd 997 mock_runcmd.return_value = (0, "some/long/path/top.log\ntop.log", "") 998 found_first_logs = self.result.GetTopFile() 999 self.assertEqual(found_first_logs, "some/long/path/top.log") 1000 1001 @mock.patch.object(command_executer.CommandExecuter, "RunCommandWOutput") 1002 def test_get_top_file_finds_no_logs(self, mock_runcmd): 1003 """Error case when no log file found.""" 1004 self.result.results_dir = "/tmp/test_results" 1005 self.result.ce.RunCommandWOutput = mock_runcmd 1006 mock_runcmd.return_value = (0, "", "") 1007 found_no_logs = self.result.GetTopFile() 1008 self.assertEqual(found_no_logs, "") 1009 1010 @mock.patch.object(command_executer.CommandExecuter, "RunCommandWOutput") 1011 def test_get_cpuinfo_file_finds_single_log(self, mock_runcmd): 1012 """Expected behavior when a single cpuinfo file found.""" 1013 self.result.results_dir = "/tmp/test_results" 1014 self.result.ce.RunCommandWOutput = mock_runcmd 1015 mock_runcmd.return_value = (0, "some/long/path/cpuinfo.log", "") 1016 found_single_log = self.result.GetCpuinfoFile() 1017 self.assertEqual(found_single_log, "some/long/path/cpuinfo.log") 1018 1019 @mock.patch.object(command_executer.CommandExecuter, "RunCommandWOutput") 1020 def test_get_cpustats_file_finds_single_log(self, mock_runcmd): 1021 """Expected behavior when a single log file found.""" 1022 self.result.results_dir = "/tmp/test_results" 1023 self.result.ce.RunCommandWOutput = mock_runcmd 1024 mock_runcmd.return_value = (0, "some/long/path/cpustats.log", "") 1025 found_single_log = self.result.GetCpustatsFile() 1026 self.assertEqual(found_single_log, "some/long/path/cpustats.log") 1027 1028 @mock.patch.object(command_executer.CommandExecuter, "RunCommandWOutput") 1029 def 
test_get_cpustats_file_finds_multiple_logs(self, mock_runcmd): 1030 """The case when multiple files found.""" 1031 self.result.results_dir = "/tmp/test_results" 1032 self.result.ce.RunCommandWOutput = mock_runcmd 1033 mock_runcmd.return_value = ( 1034 0, 1035 "some/long/path/cpustats.log\ncpustats.log", 1036 "", 1037 ) 1038 found_first_logs = self.result.GetCpustatsFile() 1039 self.assertEqual(found_first_logs, "some/long/path/cpustats.log") 1040 1041 @mock.patch.object(command_executer.CommandExecuter, "RunCommandWOutput") 1042 def test_get_cpustats_file_finds_no_logs(self, mock_runcmd): 1043 """Error case when no log file found.""" 1044 self.result.results_dir = "/tmp/test_results" 1045 self.result.ce.RunCommandWOutput = mock_runcmd 1046 mock_runcmd.return_value = (0, "", "") 1047 found_no_logs = self.result.GetCpustatsFile() 1048 self.assertEqual(found_no_logs, "") 1049 1050 def test_verify_perf_data_pid_ok(self): 1051 """Verify perf PID which is present in TOP_DATA.""" 1052 self.result.top_cmds = TOP_DATA 1053 # pid is present in TOP_DATA. 1054 with mock.patch.object( 1055 Result, "ReadPidFromPerfData", return_value=["5713"] 1056 ): 1057 self.result.VerifyPerfDataPID() 1058 1059 def test_verify_perf_data_pid_fail(self): 1060 """Test perf PID missing in top raises the error.""" 1061 self.result.top_cmds = TOP_DATA 1062 # pid is not in the list of top processes. 1063 with mock.patch.object( 1064 Result, "ReadPidFromPerfData", return_value=["9999"] 1065 ): 1066 with self.assertRaises(PidVerificationError): 1067 self.result.VerifyPerfDataPID() 1068 1069 @mock.patch.object( 1070 command_executer.CommandExecuter, "ChrootRunCommandWOutput" 1071 ) 1072 def test_read_pid_from_perf_data_ok(self, mock_runcmd): 1073 """Test perf header parser, normal flow.""" 1074 self.result.ce.ChrootRunCommandWOutput = mock_runcmd 1075 self.result.perf_data_files = [ 1076 "/tmp/chromeos/chroot/tmp/results/perf.data" 1077 ] 1078 exp_pid = "12345" 1079 mock_runcmd.return_value = (0, PERF_DATA_HEADER.format(pid=exp_pid), "") 1080 pids = self.result.ReadPidFromPerfData() 1081 self.assertEqual(pids, [exp_pid]) 1082 1083 @mock.patch.object( 1084 command_executer.CommandExecuter, "ChrootRunCommandWOutput" 1085 ) 1086 def test_read_pid_from_perf_data_mult_profiles(self, mock_runcmd): 1087 """Test multiple perf.data files with PID.""" 1088 self.result.ce.ChrootRunCommandWOutput = mock_runcmd 1089 # self.result.chromeos_root = '/tmp/chromeos' 1090 self.result.perf_data_files = [ 1091 "/tmp/chromeos/chroot/tmp/results/perf.data.0", 1092 "/tmp/chromeos/chroot/tmp/results/perf.data.1", 1093 ] 1094 # There is '-p <pid>' in command line but it's still system-wide: '-a'. 
1095 cmd_line = "# cmdline : /usr/bin/perf record -e instructions -p {pid}" 1096 exp_perf_pids = ["1111", "2222"] 1097 mock_runcmd.side_effect = [ 1098 (0, cmd_line.format(pid=exp_perf_pids[0]), ""), 1099 (0, cmd_line.format(pid=exp_perf_pids[1]), ""), 1100 ] 1101 pids = self.result.ReadPidFromPerfData() 1102 self.assertEqual(pids, exp_perf_pids) 1103 1104 @mock.patch.object( 1105 command_executer.CommandExecuter, "ChrootRunCommandWOutput" 1106 ) 1107 def test_read_pid_from_perf_data_no_pid(self, mock_runcmd): 1108 """Test perf.data without PID.""" 1109 self.result.ce.ChrootRunCommandWOutput = mock_runcmd 1110 self.result.perf_data_files = [ 1111 "/tmp/chromeos/chroot/tmp/results/perf.data" 1112 ] 1113 cmd_line = "# cmdline : /usr/bin/perf record -e instructions" 1114 mock_runcmd.return_value = (0, cmd_line, "") 1115 pids = self.result.ReadPidFromPerfData() 1116 # pids is empty. 1117 self.assertEqual(pids, []) 1118 1119 @mock.patch.object( 1120 command_executer.CommandExecuter, "ChrootRunCommandWOutput" 1121 ) 1122 def test_read_pid_from_perf_data_system_wide(self, mock_runcmd): 1123 """Test reading from system-wide profile with PID.""" 1124 self.result.ce.ChrootRunCommandWOutput = mock_runcmd 1125 self.result.perf_data_files = [ 1126 "/tmp/chromeos/chroot/tmp/results/perf.data" 1127 ] 1128 # There is '-p <pid>' in command line but it's still system-wide: '-a'. 1129 cmd_line = "# cmdline : /usr/bin/perf record -e instructions -a -p 1234" 1130 mock_runcmd.return_value = (0, cmd_line, "") 1131 pids = self.result.ReadPidFromPerfData() 1132 # pids should be empty since it's not a per-process profiling. 1133 self.assertEqual(pids, []) 1134 1135 @mock.patch.object( 1136 command_executer.CommandExecuter, "ChrootRunCommandWOutput" 1137 ) 1138 def test_read_pid_from_perf_data_read_fail(self, mock_runcmd): 1139 """Failure to read perf.data raises the error.""" 1140 self.result.ce.ChrootRunCommandWOutput = mock_runcmd 1141 self.result.perf_data_files = [ 1142 "/tmp/chromeos/chroot/tmp/results/perf.data" 1143 ] 1144 # Error status of the profile read. 1145 mock_runcmd.return_value = (1, "", "") 1146 with self.assertRaises(PerfDataReadError): 1147 self.result.ReadPidFromPerfData() 1148 1149 @mock.patch.object( 1150 command_executer.CommandExecuter, "ChrootRunCommandWOutput" 1151 ) 1152 def test_read_pid_from_perf_data_fail(self, mock_runcmd): 1153 """Failure to find cmdline in perf.data header raises the error.""" 1154 self.result.ce.ChrootRunCommandWOutput = mock_runcmd 1155 self.result.perf_data_files = [ 1156 "/tmp/chromeos/chroot/tmp/results/perf.data" 1157 ] 1158 # Empty output. 1159 mock_runcmd.return_value = (0, "", "") 1160 with self.assertRaises(PerfDataReadError): 1161 self.result.ReadPidFromPerfData() 1162 1163 def test_process_turbostat_results_with_valid_data(self): 1164 """Normal case when log exists and contains valid data.""" 1165 self.result.turbostat_log_file = "/tmp/somelogfile.log" 1166 with mock.patch( 1167 "builtins.open", 1168 mock.mock_open(read_data=TURBOSTAT_LOG_OUTPUT), 1169 ) as mo: 1170 cpustats = self.result.ProcessTurbostatResults() 1171 # Check that the log got opened and data were read/parsed. 
1172 calls = [mock.call("/tmp/somelogfile.log", encoding="utf-8")] 1173 mo.assert_has_calls(calls) 1174 self.assertEqual(cpustats, TURBOSTAT_DATA) 1175 1176 def test_process_turbostat_results_from_empty_file(self): 1177 """Error case when log exists but file is empty.""" 1178 self.result.turbostat_log_file = "/tmp/emptylogfile.log" 1179 with mock.patch("builtins.open", mock.mock_open(read_data="")) as mo: 1180 cpustats = self.result.ProcessTurbostatResults() 1181 # Check that the log got opened and parsed successfully and empty 1182 # data returned. 1183 calls = [mock.call("/tmp/emptylogfile.log", encoding="utf-8")] 1184 mo.assert_has_calls(calls) 1185 self.assertEqual(cpustats, {}) 1186 1187 def test_process_turbostat_results_when_file_doesnt_exist(self): 1188 """Error case when file does not exist.""" 1189 nonexistinglog = "/tmp/1" 1190 while os.path.exists(nonexistinglog): 1191 # Extend file path if it happens to exist. 1192 nonexistinglog = os.path.join(nonexistinglog, "1") 1193 self.result.turbostat_log_file = nonexistinglog 1194 # Allow the tested function to call a 'real' open and hopefully crash. 1195 with self.assertRaises(IOError): 1196 self.result.ProcessTurbostatResults() 1197 1198 def test_process_cpustats_results_with_uniq_data(self): 1199 """Process cpustats log which has freq unique to each core. 1200 1201 Testing normal case when frequency data vary between 1202 different cores. 1203 Expecting that data for all cores will be present in 1204 returned cpustats. 1205 """ 1206 self.result.cpustats_log_file = "/tmp/somelogfile.log" 1207 with mock.patch( 1208 "builtins.open", 1209 mock.mock_open(read_data=CPUSTATS_UNIQ_OUTPUT), 1210 ) as mo: 1211 cpustats = self.result.ProcessCpustatsResults() 1212 # Check that the log got opened and data were read/parsed. 1213 calls = [mock.call("/tmp/somelogfile.log", encoding="utf-8")] 1214 mo.assert_has_calls(calls) 1215 self.assertEqual(cpustats, CPUSTATS_UNIQ_DATA) 1216 1217 def test_process_cpustats_results_with_dupl_data(self): 1218 """Process cpustats log where cores have duplicate freq. 1219 1220 Testing normal case when frequency data on some cores 1221 are duplicated. 1222 Expecting that duplicated data is discarded in 1223 returned cpustats. 1224 """ 1225 self.result.cpustats_log_file = "/tmp/somelogfile.log" 1226 with mock.patch( 1227 "builtins.open", 1228 mock.mock_open(read_data=CPUSTATS_DUPL_OUTPUT), 1229 ) as mo: 1230 cpustats = self.result.ProcessCpustatsResults() 1231 # Check that the log got opened and data were read/parsed. 1232 calls = [mock.call("/tmp/somelogfile.log", encoding="utf-8")] 1233 mo.assert_has_calls(calls) 1234 self.assertEqual(cpustats, CPUSTATS_DUPL_DATA) 1235 1236 def test_process_cpustats_results_from_empty_file(self): 1237 """Error case when log exists but file is empty.""" 1238 self.result.cpustats_log_file = "/tmp/emptylogfile.log" 1239 with mock.patch("builtins.open", mock.mock_open(read_data="")) as mo: 1240 cpustats = self.result.ProcessCpustatsResults() 1241 # Check that the log got opened and parsed successfully and empty 1242 # data returned. 
1243 calls = [mock.call("/tmp/emptylogfile.log", encoding="utf-8")] 1244 mo.assert_has_calls(calls) 1245 self.assertEqual(cpustats, {}) 1246 1247 def test_process_top_results_with_valid_data(self): 1248 """Process top log with valid data.""" 1249 1250 self.result.top_log_file = "/tmp/fakelogfile.log" 1251 with mock.patch( 1252 "builtins.open", mock.mock_open(read_data=TOP_LOG) 1253 ) as mo: 1254 topproc = self.result.ProcessTopResults() 1255 # Check that the log got opened and data were read/parsed. 1256 calls = [mock.call("/tmp/fakelogfile.log", encoding="utf-8")] 1257 mo.assert_has_calls(calls) 1258 self.assertEqual(topproc, TOP_DATA) 1259 1260 def test_process_top_results_from_empty_file(self): 1261 """Error case when log exists but file is empty.""" 1262 self.result.top_log_file = "/tmp/emptylogfile.log" 1263 with mock.patch("builtins.open", mock.mock_open(read_data="")) as mo: 1264 topcalls = self.result.ProcessTopResults() 1265 # Check that the log got opened and parsed successfully and empty 1266 # data returned. 1267 calls = [mock.call("/tmp/emptylogfile.log", encoding="utf-8")] 1268 mo.assert_has_calls(calls) 1269 self.assertEqual(topcalls, []) 1270 1271 def test_format_string_top_cmds(self): 1272 """Test formatted string with top commands.""" 1273 self.result.top_cmds = [ 1274 { 1275 "cmd": "chrome-111", 1276 "cpu_use_avg": 119.753453465, 1277 "count": 44444, 1278 "top5_cpu_use": [222.8, 217.9, 217.8, 191.0, 189.9], 1279 }, 1280 { 1281 "cmd": "chrome-222", 1282 "cpu_use_avg": 100, 1283 "count": 33333, 1284 "top5_cpu_use": [200.0, 195.0, 190.0, 185.0, 180.0], 1285 }, 1286 { 1287 "cmd": "irq/230-cros-ec", 1288 "cpu_use_avg": 10.000000000000001, 1289 "count": 1000, 1290 "top5_cpu_use": [11.5, 11.4, 11.3, 11.2, 11.1], 1291 }, 1292 { 1293 "cmd": "powerd", 1294 "cpu_use_avg": 2.0, 1295 "count": 2, 1296 "top5_cpu_use": [3.0, 1.0], 1297 }, 1298 { 1299 "cmd": "cmd3", 1300 "cpu_use_avg": 1.0, 1301 "count": 1, 1302 "top5_cpu_use": [1.0], 1303 }, 1304 { 1305 "cmd": "cmd4", 1306 "cpu_use_avg": 1.0, 1307 "count": 1, 1308 "top5_cpu_use": [1.0], 1309 }, 1310 { 1311 "cmd": "cmd5", 1312 "cpu_use_avg": 1.0, 1313 "count": 1, 1314 "top5_cpu_use": [1.0], 1315 }, 1316 { 1317 "cmd": "cmd6_not_for_print", 1318 "cpu_avg": 1.0, 1319 "count": 1, 1320 "top5": [1.0], 1321 }, 1322 ] 1323 form_str = self.result.FormatStringTopCommands() 1324 self.assertEqual( 1325 form_str, 1326 "\n".join( 1327 [ 1328 "Top commands with highest CPU usage:", 1329 " COMMAND AVG CPU% COUNT HIGHEST 5", 1330 "-" * 50, 1331 " chrome-111 119.75 44444 " 1332 "[222.8, 217.9, 217.8, 191.0, 189.9]", 1333 " chrome-222 100.00 33333 " 1334 "[200.0, 195.0, 190.0, 185.0, 180.0]", 1335 " irq/230-cros-ec 10.00 1000 " 1336 "[11.5, 11.4, 11.3, 11.2, 11.1]", 1337 " powerd 2.00 2 [3.0, 1.0]", 1338 " cmd3 1.00 1 [1.0]", 1339 " cmd4 1.00 1 [1.0]", 1340 " cmd5 1.00 1 [1.0]", 1341 "-" * 50, 1342 ] 1343 ), 1344 ) 1345 1346 def test_format_string_top_calls_no_data(self): 1347 """Test formatted string of top with no data.""" 1348 self.result.top_cmds = [] 1349 form_str = self.result.FormatStringTopCommands() 1350 self.assertEqual( 1351 form_str, 1352 "\n".join( 1353 [ 1354 "Top commands with highest CPU usage:", 1355 " COMMAND AVG CPU% COUNT HIGHEST 5", 1356 "-" * 50, 1357 "[NO DATA FROM THE TOP LOG]", 1358 "-" * 50, 1359 ] 1360 ), 1361 ) 1362 1363 @mock.patch.object(os.path, "exists") 1364 @mock.patch.object(misc, "GetInsideChrootPath") 1365 @mock.patch.object(command_executer.CommandExecuter, "ChrootRunCommand") 1366 def test_generate_perf_report_files( 
1367 self, mock_chrootruncmd, mock_getpath, mock_pathexists 1368 ): 1369 mock_pathexists.side_effect = ( 1370 lambda x: self.orig_exists(x) 1371 if x != "/etc/cros_chroot_version" 1372 else False 1373 ) 1374 fake_file = "/tmp/results/perf.data.report" 1375 self.result.perf_data_files = ["/tmp/results/perf.data"] 1376 self.result.board = "lumpy" 1377 mock_getpath.return_value = fake_file 1378 self.result.ce.ChrootRunCommand = mock_chrootruncmd 1379 mock_chrootruncmd.return_value = 0 1380 # Debug path not found 1381 self.result.label.debug_path = "" 1382 tmp = self.result.GeneratePerfReportFiles() 1383 self.assertEqual(len(tmp), 1) 1384 self.assertEqual(tmp[0], RegexMatcher("/tmp/chromeos.*%s" % fake_file)) 1385 self.assertEqual( 1386 mock_chrootruncmd.call_args_list[0][0], 1387 ( 1388 self.result.chromeos_root, 1389 ("/usr/sbin/perf report -n " "-i %s --stdio > %s") 1390 % (fake_file, fake_file), 1391 ), 1392 ) 1393 1394 @mock.patch.object(os.path, "exists") 1395 @mock.patch.object(misc, "GetInsideChrootPath") 1396 @mock.patch.object(command_executer.CommandExecuter, "ChrootRunCommand") 1397 def test_generate_perf_report_files_debug( 1398 self, mock_chrootruncmd, mock_getpath, mock_pathexists 1399 ): 1400 mock_pathexists.side_effect = ( 1401 lambda x: self.orig_exists(x) 1402 if x != "/etc/cros_chroot_version" 1403 else False 1404 ) 1405 fake_file = "/tmp/results/perf.data.report" 1406 self.result.perf_data_files = ["/tmp/results/perf.data"] 1407 self.result.board = "lumpy" 1408 mock_getpath.return_value = fake_file 1409 self.result.ce.ChrootRunCommand = mock_chrootruncmd 1410 mock_chrootruncmd.return_value = 0 1411 # Debug path found 1412 self.result.label.debug_path = "/tmp/debug" 1413 tmp = self.result.GeneratePerfReportFiles() 1414 self.assertEqual(len(tmp), 1) 1415 self.assertEqual(tmp[0], RegexMatcher("/tmp/chromeos.*%s" % fake_file)) 1416 self.assertEqual( 1417 mock_chrootruncmd.call_args_list[0][0], 1418 ( 1419 self.result.chromeos_root, 1420 ( 1421 "/usr/sbin/perf report -n --symfs /tmp/debug " 1422 "--vmlinux /tmp/debug/usr/lib/debug/boot/vmlinux " 1423 "-i %s --stdio > %s" 1424 ) 1425 % (fake_file, fake_file), 1426 ), 1427 ) 1428 1429 @mock.patch.object(misc, "GetOutsideChrootPath") 1430 def test_populate_from_run(self, mock_getpath): 1431 def FakeGetResultsDir(): 1432 self.callGetResultsDir = True 1433 return "/tmp/results_dir" 1434 1435 def FakeGetResultsFile(): 1436 self.callGetResultsFile = True 1437 return [] 1438 1439 def FakeGetPerfDataFiles(): 1440 self.callGetPerfDataFiles = True 1441 return [] 1442 1443 def FakeGetPerfReportFiles(): 1444 self.callGetPerfReportFiles = True 1445 return [] 1446 1447 def FakeGetTurbostatFile(): 1448 self.callGetTurbostatFile = True 1449 return [] 1450 1451 def FakeGetCpustatsFile(): 1452 self.callGetCpustatsFile = True 1453 return [] 1454 1455 def FakeGetTopFile(): 1456 self.callGetTopFile = True 1457 return [] 1458 1459 def FakeGetCpuinfoFile(): 1460 self.callGetCpuinfoFile = True 1461 return [] 1462 1463 def FakeGetWaitTimeFile(): 1464 self.callGetWaitTimeFile = True 1465 return [] 1466 1467 def FakeProcessResults(show_results=False): 1468 if show_results: 1469 pass 1470 self.callProcessResults = True 1471 1472 if mock_getpath: 1473 pass 1474 mock.get_path = "/tmp/chromeos/tmp/results_dir" 1475 1476 self.callGetResultsDir = False 1477 self.callGetResultsFile = False 1478 self.callGetPerfDataFiles = False 1479 self.callGetPerfReportFiles = False 1480 self.callGetTurbostatFile = False 1481 self.callGetCpustatsFile = False 1482 
self.callGetTopFile = False 1483 self.callGetCpuinfoFile = False 1484 self.callGetWaitTimeFile = False 1485 self.callProcessResults = False 1486 1487 self.result.GetResultsDir = FakeGetResultsDir 1488 self.result.GetResultsFile = FakeGetResultsFile 1489 self.result.GetPerfDataFiles = FakeGetPerfDataFiles 1490 self.result.GeneratePerfReportFiles = FakeGetPerfReportFiles 1491 self.result.GetTurbostatFile = FakeGetTurbostatFile 1492 self.result.GetCpustatsFile = FakeGetCpustatsFile 1493 self.result.GetTopFile = FakeGetTopFile 1494 self.result.GetCpuinfoFile = FakeGetCpuinfoFile 1495 self.result.GetWaitTimeFile = FakeGetWaitTimeFile 1496 self.result.ProcessResults = FakeProcessResults 1497 1498 self.result.PopulateFromRun( 1499 OUTPUT, "", 0, "test", "telemetry_Crosperf", "chrome" 1500 ) 1501 self.assertTrue(self.callGetResultsDir) 1502 self.assertTrue(self.callGetResultsFile) 1503 self.assertTrue(self.callGetPerfDataFiles) 1504 self.assertTrue(self.callGetPerfReportFiles) 1505 self.assertTrue(self.callGetTurbostatFile) 1506 self.assertTrue(self.callGetCpustatsFile) 1507 self.assertTrue(self.callGetTopFile) 1508 self.assertTrue(self.callGetCpuinfoFile) 1509 self.assertTrue(self.callGetWaitTimeFile) 1510 self.assertTrue(self.callProcessResults) 1511 1512 def FakeGetKeyvals(self, show_all=False): 1513 if show_all: 1514 return {"first_time": 680, "Total": 10} 1515 else: 1516 return {"Total": 10} 1517 1518 def test_process_results(self): 1519 def FakeGatherPerfResults(): 1520 self.callGatherPerfResults = True 1521 1522 def FakeGetSamples(): 1523 return (1, "samples") 1524 1525 # Test 1 1526 self.callGatherPerfResults = False 1527 1528 self.result.GetKeyvals = self.FakeGetKeyvals 1529 self.result.GatherPerfResults = FakeGatherPerfResults 1530 1531 self.result.retval = 0 1532 self.result.ProcessResults() 1533 self.assertTrue(self.callGatherPerfResults) 1534 self.assertEqual(len(self.result.keyvals), 2) 1535 self.assertEqual(self.result.keyvals, {"Total": 10, "retval": 0}) 1536 1537 # Test 2 1538 self.result.retval = 1 1539 self.result.ProcessResults() 1540 self.assertEqual(len(self.result.keyvals), 2) 1541 self.assertEqual(self.result.keyvals, {"Total": 10, "retval": 1}) 1542 1543 # Test 3 1544 self.result.cwp_dso = "chrome" 1545 self.result.retval = 0 1546 self.result.GetSamples = FakeGetSamples 1547 self.result.ProcessResults() 1548 self.assertEqual(len(self.result.keyvals), 3) 1549 self.assertEqual( 1550 self.result.keyvals, 1551 {"Total": 10, "samples": (1, "samples"), "retval": 0}, 1552 ) 1553 1554 # Test 4. 
Parse output of benchmarks with multiple stories in histogram 1555        # format 1556        self.result.suite = "telemetry_Crosperf" 1557        self.result.results_file = [tempfile.mkdtemp() + "/histograms.json"] 1558        with open(self.result.results_file[0], "w", encoding="utf-8") as f: 1559            f.write(HISTOGRAMSET) 1560        self.result.ProcessResults() 1561        shutil.rmtree(os.path.dirname(self.result.results_file[0])) 1562        # Verify the summary for the story is correct 1563        self.assertEqual( 1564            self.result.keyvals["timeToFirstContentfulPaint__typical"], 1565            [880.000, "ms_smallerIsBetter"], 1566        ) 1567        # Verify the summary for a certain story tag is correct 1568        self.assertEqual( 1569            self.result.keyvals[ 1570                "timeToFirstContentfulPaint__cache_temperature:cold" 1571            ], 1572            [1000.000, "ms_smallerIsBetter"], 1573        ) 1574        self.assertEqual( 1575            self.result.keyvals[ 1576                "timeToFirstContentfulPaint__cache_temperature:warm" 1577            ], 1578            [800.000, "ms_smallerIsBetter"], 1579        ) 1580 1581    @mock.patch.object(Result, "ProcessCpustatsResults") 1582    @mock.patch.object(Result, "ProcessTurbostatResults") 1583    def test_process_results_with_turbostat_log( 1584        self, mock_proc_turbo, mock_proc_cpustats 1585    ): 1586        self.result.GetKeyvals = self.FakeGetKeyvals 1587 1588        self.result.retval = 0 1589        self.result.turbostat_log_file = "/tmp/turbostat.log" 1590        mock_proc_turbo.return_value = { 1591            "cpufreq": {"all": [1, 2, 3]}, 1592            "cputemp": {"all": [5.0, 6.0, 7.0]}, 1593        } 1594        self.result.ProcessResults() 1595        mock_proc_turbo.assert_has_calls([mock.call()]) 1596        mock_proc_cpustats.assert_not_called() 1597        self.assertEqual(len(self.result.keyvals), 8) 1598        self.assertEqual( 1599            self.result.keyvals, 1600            { 1601                "Total": 10, 1602                "cpufreq_all_avg": 2, 1603                "cpufreq_all_max": 3, 1604                "cpufreq_all_min": 1, 1605                "cputemp_all_avg": 6.0, 1606                "cputemp_all_min": 5.0, 1607                "cputemp_all_max": 7.0, 1608                "retval": 0, 1609            }, 1610        ) 1611 1612    @mock.patch.object(Result, "ProcessCpustatsResults") 1613    @mock.patch.object(Result, "ProcessTurbostatResults") 1614    def test_process_results_with_cpustats_log( 1615        self, mock_proc_turbo, mock_proc_cpustats 1616    ): 1617        self.result.GetKeyvals = self.FakeGetKeyvals 1618 1619        self.result.retval = 0 1620        self.result.cpustats_log_file = "/tmp/cpustats.log" 1621        mock_proc_cpustats.return_value = { 1622            "cpufreq": {"cpu0": [100, 100, 100], "cpu1": [4, 5, 6]}, 1623            "cputemp": { 1624                "little": [20.2, 20.2, 20.2], 1625                "big": [55.2, 66.1, 77.3], 1626            }, 1627        } 1628        self.result.ProcessResults() 1629        mock_proc_turbo.assert_not_called() 1630        mock_proc_cpustats.assert_has_calls([mock.call()]) 1631        self.assertEqual(len(self.result.keyvals), 10) 1632        self.assertEqual( 1633            self.result.keyvals, 1634            { 1635                "Total": 10, 1636                "cpufreq_cpu0_avg": 100, 1637                "cpufreq_cpu1_avg": 5, 1638                "cpufreq_cpu1_max": 6, 1639                "cpufreq_cpu1_min": 4, 1640                "cputemp_big_avg": 66.2, 1641                "cputemp_big_max": 77.3, 1642                "cputemp_big_min": 55.2, 1643                "cputemp_little_avg": 20.2, 1644                "retval": 0, 1645            }, 1646        ) 1647 1648    @mock.patch.object(Result, "ProcessCpustatsResults") 1649    @mock.patch.object(Result, "ProcessTurbostatResults") 1650    def test_process_results_with_turbostat_and_cpustats_logs( 1651        self, mock_proc_turbo, mock_proc_cpustats 1652    ): 1653        self.result.GetKeyvals = self.FakeGetKeyvals 1654 1655        self.result.retval = 0 1656        self.result.turbostat_log_file = "/tmp/turbostat.log" 1657        self.result.cpustats_log_file = "/tmp/cpustats.log" 1658        mock_proc_turbo.return_value = { 1659            "cpufreq": {"all": [1, 2, 3]}, 1660            "cputemp": {"all": [5.0, 6.0,
7.0]}, 1661 } 1662 self.result.ProcessResults() 1663 mock_proc_turbo.assert_has_calls([mock.call()]) 1664 mock_proc_cpustats.assert_not_called() 1665 self.assertEqual(len(self.result.keyvals), 8) 1666 self.assertEqual( 1667 self.result.keyvals, 1668 { 1669 "Total": 10, 1670 "cpufreq_all_avg": 2, 1671 "cpufreq_all_max": 3, 1672 "cpufreq_all_min": 1, 1673 "cputemp_all_avg": 6.0, 1674 "cputemp_all_min": 5.0, 1675 "cputemp_all_max": 7.0, 1676 "retval": 0, 1677 }, 1678 ) 1679 1680 @mock.patch.object(Result, "ProcessCpustatsResults") 1681 @mock.patch.object(Result, "ProcessTurbostatResults") 1682 def test_process_results_without_cpu_data( 1683 self, mock_proc_turbo, mock_proc_cpustats 1684 ): 1685 self.result.GetKeyvals = self.FakeGetKeyvals 1686 1687 self.result.retval = 0 1688 self.result.turbostat_log_file = "" 1689 self.result.cpustats_log_file = "" 1690 self.result.ProcessResults() 1691 mock_proc_turbo.assert_not_called() 1692 mock_proc_cpustats.assert_not_called() 1693 self.assertEqual(len(self.result.keyvals), 2) 1694 self.assertEqual(self.result.keyvals, {"Total": 10, "retval": 0}) 1695 1696 @mock.patch.object(misc, "GetInsideChrootPath") 1697 @mock.patch.object( 1698 command_executer.CommandExecuter, "ChrootRunCommandWOutput" 1699 ) 1700 def test_populate_from_cache_dir(self, mock_runchrootcmd, mock_getpath): 1701 # pylint: disable=redefined-builtin 1702 def FakeMkdtemp(dir=None): 1703 if dir: 1704 pass 1705 return self.tmpdir 1706 1707 def FakeGetSamples(): 1708 return [1, "samples"] 1709 1710 current_path = os.getcwd() 1711 cache_dir = os.path.join(current_path, "test_cache/test_input") 1712 self.result.ce = command_executer.GetCommandExecuter( 1713 log_level="average" 1714 ) 1715 self.result.ce.ChrootRunCommandWOutput = mock_runchrootcmd 1716 mock_runchrootcmd.return_value = [ 1717 "", 1718 ("%s,PASS\n%s/\telemetry_Crosperf,PASS\n") % (TMP_DIR1, TMP_DIR1), 1719 "", 1720 ] 1721 mock_getpath.return_value = TMP_DIR1 1722 self.tmpdir = tempfile.mkdtemp() 1723 save_real_mkdtemp = tempfile.mkdtemp 1724 tempfile.mkdtemp = FakeMkdtemp 1725 1726 self.result.PopulateFromCacheDir( 1727 cache_dir, "sunspider", "telemetry_Crosperf", "" 1728 ) 1729 self.assertEqual( 1730 self.result.keyvals, 1731 { 1732 "Total__Total": [444.0, "ms"], 1733 "regexp-dna__regexp-dna": [16.2, "ms"], 1734 "telemetry_page_measurement_results__num_failed": [ 1735 0, 1736 "count", 1737 ], 1738 "telemetry_page_measurement_results__num_errored": [ 1739 0, 1740 "count", 1741 ], 1742 "string-fasta__string-fasta": [23.2, "ms"], 1743 "crypto-sha1__crypto-sha1": [11.6, "ms"], 1744 "bitops-3bit-bits-in-byte__bitops-3bit-bits-in-byte": [ 1745 3.2, 1746 "ms", 1747 ], 1748 "access-nsieve__access-nsieve": [7.9, "ms"], 1749 "bitops-nsieve-bits__bitops-nsieve-bits": [9.4, "ms"], 1750 "string-validate-input__string-validate-input": [19.3, "ms"], 1751 "3d-raytrace__3d-raytrace": [24.7, "ms"], 1752 "3d-cube__3d-cube": [28.0, "ms"], 1753 "string-unpack-code__string-unpack-code": [46.7, "ms"], 1754 "date-format-tofte__date-format-tofte": [26.3, "ms"], 1755 "math-partial-sums__math-partial-sums": [22.0, "ms"], 1756 "\telemetry_Crosperf": ["PASS", ""], 1757 "crypto-aes__crypto-aes": [15.2, "ms"], 1758 "bitops-bitwise-and__bitops-bitwise-and": [8.4, "ms"], 1759 "crypto-md5__crypto-md5": [10.5, "ms"], 1760 "string-tagcloud__string-tagcloud": [52.8, "ms"], 1761 "access-nbody__access-nbody": [8.5, "ms"], 1762 "retval": 0, 1763 "math-spectral-norm__math-spectral-norm": [6.6, "ms"], 1764 "math-cordic__math-cordic": [8.7, "ms"], 1765 
"access-binary-trees__access-binary-trees": [4.5, "ms"], 1766 "controlflow-recursive__controlflow-recursive": [4.4, "ms"], 1767 "access-fannkuch__access-fannkuch": [17.8, "ms"], 1768 "string-base64__string-base64": [16.0, "ms"], 1769 "date-format-xparb__date-format-xparb": [20.9, "ms"], 1770 "3d-morph__3d-morph": [22.1, "ms"], 1771 "bitops-bits-in-byte__bitops-bits-in-byte": [9.1, "ms"], 1772 }, 1773 ) 1774 1775 self.result.GetSamples = FakeGetSamples 1776 self.result.PopulateFromCacheDir( 1777 cache_dir, "sunspider", "telemetry_Crosperf", "chrome" 1778 ) 1779 self.assertEqual( 1780 self.result.keyvals, 1781 { 1782 "Total__Total": [444.0, "ms"], 1783 "regexp-dna__regexp-dna": [16.2, "ms"], 1784 "telemetry_page_measurement_results__num_failed": [ 1785 0, 1786 "count", 1787 ], 1788 "telemetry_page_measurement_results__num_errored": [ 1789 0, 1790 "count", 1791 ], 1792 "string-fasta__string-fasta": [23.2, "ms"], 1793 "crypto-sha1__crypto-sha1": [11.6, "ms"], 1794 "bitops-3bit-bits-in-byte__bitops-3bit-bits-in-byte": [ 1795 3.2, 1796 "ms", 1797 ], 1798 "access-nsieve__access-nsieve": [7.9, "ms"], 1799 "bitops-nsieve-bits__bitops-nsieve-bits": [9.4, "ms"], 1800 "string-validate-input__string-validate-input": [19.3, "ms"], 1801 "3d-raytrace__3d-raytrace": [24.7, "ms"], 1802 "3d-cube__3d-cube": [28.0, "ms"], 1803 "string-unpack-code__string-unpack-code": [46.7, "ms"], 1804 "date-format-tofte__date-format-tofte": [26.3, "ms"], 1805 "math-partial-sums__math-partial-sums": [22.0, "ms"], 1806 "\telemetry_Crosperf": ["PASS", ""], 1807 "crypto-aes__crypto-aes": [15.2, "ms"], 1808 "bitops-bitwise-and__bitops-bitwise-and": [8.4, "ms"], 1809 "crypto-md5__crypto-md5": [10.5, "ms"], 1810 "string-tagcloud__string-tagcloud": [52.8, "ms"], 1811 "access-nbody__access-nbody": [8.5, "ms"], 1812 "retval": 0, 1813 "math-spectral-norm__math-spectral-norm": [6.6, "ms"], 1814 "math-cordic__math-cordic": [8.7, "ms"], 1815 "access-binary-trees__access-binary-trees": [4.5, "ms"], 1816 "controlflow-recursive__controlflow-recursive": [4.4, "ms"], 1817 "access-fannkuch__access-fannkuch": [17.8, "ms"], 1818 "string-base64__string-base64": [16.0, "ms"], 1819 "date-format-xparb__date-format-xparb": [20.9, "ms"], 1820 "3d-morph__3d-morph": [22.1, "ms"], 1821 "bitops-bits-in-byte__bitops-bits-in-byte": [9.1, "ms"], 1822 "samples": [1, "samples"], 1823 }, 1824 ) 1825 1826 # Clean up after test. 1827 tempfile.mkdtemp = save_real_mkdtemp 1828 command = "rm -Rf %s" % self.tmpdir 1829 self.result.ce.RunCommand(command) 1830 1831 @mock.patch.object(misc, "GetRoot") 1832 @mock.patch.object(command_executer.CommandExecuter, "RunCommand") 1833 def test_cleanup(self, mock_runcmd, mock_getroot): 1834 # Test 1. 'rm_chroot_tmp' is True; self.results_dir exists; 1835 # self.temp_dir exists; results_dir name contains 'test_that_results_'. 1836 mock_getroot.return_value = [ 1837 "/tmp/tmp_AbcXyz", 1838 "test_that_results_fake", 1839 ] 1840 self.result.ce.RunCommand = mock_runcmd 1841 self.result.results_dir = "test_results_dir" 1842 self.result.temp_dir = "testtemp_dir" 1843 self.result.CleanUp(True) 1844 self.assertEqual(mock_getroot.call_count, 1) 1845 self.assertEqual(mock_runcmd.call_count, 2) 1846 self.assertEqual( 1847 mock_runcmd.call_args_list[0][0], ("rm -rf test_results_dir",) 1848 ) 1849 self.assertEqual( 1850 mock_runcmd.call_args_list[1][0], ("rm -rf testtemp_dir",) 1851 ) 1852 1853 # Test 2. 
Same, except that results_dir name does not contain 1854        # 'test_that_results_' 1855        mock_getroot.reset_mock() 1856        mock_runcmd.reset_mock() 1857        mock_getroot.return_value = ["/tmp/tmp_AbcXyz", "other_results_fake"] 1858        self.result.ce.RunCommand = mock_runcmd 1859        self.result.results_dir = "test_results_dir" 1860        self.result.temp_dir = "testtemp_dir" 1861        self.result.CleanUp(True) 1862        self.assertEqual(mock_getroot.call_count, 1) 1863        self.assertEqual(mock_runcmd.call_count, 2) 1864        self.assertEqual( 1865            mock_runcmd.call_args_list[0][0], ("rm -rf /tmp/tmp_AbcXyz",) 1866        ) 1867        self.assertEqual( 1868            mock_runcmd.call_args_list[1][0], ("rm -rf testtemp_dir",) 1869        ) 1870 1871        # Test 3. mock_getroot returns nothing; 'rm_chroot_tmp' is False. 1872        mock_getroot.reset_mock() 1873        mock_runcmd.reset_mock() 1874        self.result.CleanUp(False) 1875        self.assertEqual(mock_getroot.call_count, 0) 1876        self.assertEqual(mock_runcmd.call_count, 1) 1877        self.assertEqual( 1878            mock_runcmd.call_args_list[0][0], ("rm -rf testtemp_dir",) 1879        ) 1880 1881        # Test 4. 'rm_chroot_tmp' is True, but results_dir & temp_dir are None. 1882        mock_getroot.reset_mock() 1883        mock_runcmd.reset_mock() 1884        self.result.results_dir = None 1885        self.result.temp_dir = None 1886        self.result.CleanUp(True) 1887        self.assertEqual(mock_getroot.call_count, 0) 1888        self.assertEqual(mock_runcmd.call_count, 0) 1889 1890    @mock.patch.object(misc, "GetInsideChrootPath") 1891    @mock.patch.object(command_executer.CommandExecuter, "ChrootRunCommand") 1892    def test_store_to_cache_dir(self, mock_chrootruncmd, mock_getpath): 1893        def FakeMkdtemp(directory=""): 1894            if directory: 1895                pass 1896            return self.tmpdir 1897 1898        if mock_chrootruncmd or mock_getpath: 1899            pass 1900        current_path = os.getcwd() 1901        cache_dir = os.path.join(current_path, "test_cache/test_output") 1902 1903        self.result.ce = command_executer.GetCommandExecuter( 1904            log_level="average" 1905        ) 1906        self.result.out = OUTPUT 1907        self.result.err = error 1908        self.result.retval = 0 1909        self.tmpdir = tempfile.mkdtemp() 1910        if not os.path.exists(self.tmpdir): 1911            os.makedirs(self.tmpdir) 1912        self.result.results_dir = os.path.join(os.getcwd(), "test_cache") 1913        save_real_mkdtemp = tempfile.mkdtemp 1914        tempfile.mkdtemp = FakeMkdtemp 1915 1916        mock_mm = machine_manager.MockMachineManager( 1917            "/tmp/chromeos_root", 0, "average", "" 1918        ) 1919        mock_mm.machine_checksum_string[ 1920            "mock_label" 1921        ] = "fake_machine_checksum123" 1922 1923        mock_keylist = ["key1", "key2", "key3"] 1924        test_flag.SetTestMode(True) 1925        self.result.StoreToCacheDir(cache_dir, mock_mm, mock_keylist) 1926 1927        # Check that the correct things were written to the 'cache'.
1928 test_dir = os.path.join(os.getcwd(), "test_cache/test_output") 1929 base_dir = os.path.join(os.getcwd(), "test_cache/compare_output") 1930 self.assertTrue(os.path.exists(os.path.join(test_dir, "autotest.tbz2"))) 1931 self.assertTrue(os.path.exists(os.path.join(test_dir, "machine.txt"))) 1932 self.assertTrue( 1933 os.path.exists(os.path.join(test_dir, "results.pickle")) 1934 ) 1935 1936 f1 = os.path.join(test_dir, "machine.txt") 1937 f2 = os.path.join(base_dir, "machine.txt") 1938 cmd = "diff %s %s" % (f1, f2) 1939 [_, out, _] = self.result.ce.RunCommandWOutput(cmd) 1940 self.assertEqual(len(out), 0) 1941 1942 f1 = os.path.join(test_dir, "results.pickle") 1943 f2 = os.path.join(base_dir, "results.pickle") 1944 with open(f1, "rb") as f: 1945 f1_obj = pickle.load(f) 1946 with open(f2, "rb") as f: 1947 f2_obj = pickle.load(f) 1948 self.assertEqual(f1_obj, f2_obj) 1949 1950 # Clean up after test. 1951 tempfile.mkdtemp = save_real_mkdtemp 1952 command = "rm %s/*" % test_dir 1953 self.result.ce.RunCommand(command) 1954 1955 1956TELEMETRY_RESULT_KEYVALS = { 1957 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 1958 "math-cordic (ms)": "11.4", 1959 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 1960 "access-nbody (ms)": "6.9", 1961 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 1962 "access-fannkuch (ms)": "26.3", 1963 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 1964 "math-spectral-norm (ms)": "6.3", 1965 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 1966 "bitops-nsieve-bits (ms)": "9.3", 1967 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 1968 "math-partial-sums (ms)": "32.8", 1969 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 1970 "regexp-dna (ms)": "16.1", 1971 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 1972 "3d-cube (ms)": "42.7", 1973 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 1974 "crypto-md5 (ms)": "10.8", 1975 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 1976 "crypto-sha1 (ms)": "12.4", 1977 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 1978 "string-tagcloud (ms)": "47.2", 1979 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 1980 "string-fasta (ms)": "36.3", 1981 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 1982 "access-binary-trees (ms)": "7.3", 1983 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 1984 "date-format-xparb (ms)": "138.1", 1985 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 1986 "crypto-aes (ms)": "19.2", 1987 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 1988 "Total (ms)": "656.5", 1989 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 1990 "string-base64 (ms)": "17.5", 1991 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 1992 "string-validate-input (ms)": "24.8", 1993 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 1994 "3d-raytrace (ms)": "28.7", 1995 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 1996 "controlflow-recursive (ms)": "5.3", 1997 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 1998 "bitops-bits-in-byte (ms)": "9.8", 1999 
"http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 2000 "3d-morph (ms)": "50.2", 2001 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 2002 "bitops-bitwise-and (ms)": "8.8", 2003 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 2004 "access-nsieve (ms)": "8.6", 2005 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 2006 "date-format-tofte (ms)": "31.2", 2007 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 2008 "bitops-3bit-bits-in-byte (ms)": "3.5", 2009 "retval": 0, 2010 "http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html " 2011 "string-unpack-code (ms)": "45.0", 2012} 2013 2014PURE_TELEMETRY_OUTPUT = """ 2015page_name,3d-cube (ms),3d-morph (ms),3d-raytrace (ms),Total (ms),access-binary-trees (ms),access-fannkuch (ms),access-nbody (ms),access-nsieve (ms),bitops-3bit-bits-in-byte (ms),bitops-bits-in-byte (ms),bitops-bitwise-and (ms),bitops-nsieve-bits (ms),controlflow-recursive (ms),crypto-aes (ms),crypto-md5 (ms),crypto-sha1 (ms),date-format-tofte (ms),date-format-xparb (ms),math-cordic (ms),math-partial-sums (ms),math-spectral-norm (ms),regexp-dna (ms),string-base64 (ms),string-fasta (ms),string-tagcloud (ms),string-unpack-code (ms),string-validate-input (ms)\r\nhttp://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html,42.7,50.2,28.7,656.5,7.3,26.3,6.9,8.6,3.5,9.8,8.8,9.3,5.3,19.2,10.8,12.4,31.2,138.1,11.4,32.8,6.3,16.1,17.5,36.3,47.2,45.0,24.8\r 2016""" 2017 2018 2019class TelemetryResultTest(unittest.TestCase): 2020 """Telemetry result test.""" 2021 2022 def __init__(self, *args, **kwargs): 2023 super(TelemetryResultTest, self).__init__(*args, **kwargs) 2024 self.callFakeProcessResults = False 2025 self.result = None 2026 self.mock_logger = mock.Mock(spec=logger.Logger) 2027 self.mock_cmd_exec = mock.Mock(spec=command_executer.CommandExecuter) 2028 self.mock_label = MockLabel( 2029 "mock_label", 2030 "build", 2031 "chromeos_image", 2032 "autotest_dir", 2033 "debug_dir", 2034 "/tmp", 2035 "lumpy", 2036 "remote", 2037 "image_args", 2038 "cache_dir", 2039 "average", 2040 "gcc", 2041 False, 2042 None, 2043 ) 2044 self.mock_machine = machine_manager.MockCrosMachine( 2045 "falco.cros", "/tmp/chromeos", "average" 2046 ) 2047 2048 def test_populate_from_run(self): 2049 def FakeProcessResults(): 2050 self.callFakeProcessResults = True 2051 2052 self.callFakeProcessResults = False 2053 self.result = TelemetryResult( 2054 self.mock_logger, self.mock_label, "average", self.mock_cmd_exec 2055 ) 2056 self.result.ProcessResults = FakeProcessResults 2057 self.result.PopulateFromRun( 2058 OUTPUT, error, 3, "fake_test", "telemetry_Crosperf", "" 2059 ) 2060 self.assertTrue(self.callFakeProcessResults) 2061 self.assertEqual(self.result.out, OUTPUT) 2062 self.assertEqual(self.result.err, error) 2063 self.assertEqual(self.result.retval, 3) 2064 2065 def test_populate_from_cache_dir_and_process_results(self): 2066 self.result = TelemetryResult( 2067 self.mock_logger, self.mock_label, "average", self.mock_machine 2068 ) 2069 current_path = os.getcwd() 2070 cache_dir = os.path.join( 2071 current_path, "test_cache/test_puretelemetry_input" 2072 ) 2073 self.result.PopulateFromCacheDir(cache_dir, "", "", "") 2074 self.assertEqual(self.result.out.strip(), PURE_TELEMETRY_OUTPUT.strip()) 2075 self.assertEqual(self.result.err, "") 2076 self.assertEqual(self.result.retval, 0) 2077 self.assertEqual(self.result.keyvals, TELEMETRY_RESULT_KEYVALS) 2078 2079 2080class 
ResultsCacheTest(unittest.TestCase): 2081 """Resultcache test class.""" 2082 2083 def __init__(self, *args, **kwargs): 2084 super(ResultsCacheTest, self).__init__(*args, **kwargs) 2085 self.fakeCacheReturnResult = None 2086 self.mock_logger = mock.Mock(spec=logger.Logger) 2087 self.mock_label = MockLabel( 2088 "mock_label", 2089 "build", 2090 "chromeos_image", 2091 "autotest_dir", 2092 "debug_dir", 2093 "/tmp", 2094 "lumpy", 2095 "remote", 2096 "image_args", 2097 "cache_dir", 2098 "average", 2099 "gcc", 2100 False, 2101 None, 2102 ) 2103 2104 def setUp(self): 2105 self.results_cache = ResultsCache() 2106 2107 mock_machine = machine_manager.MockCrosMachine( 2108 "falco.cros", "/tmp/chromeos", "average" 2109 ) 2110 2111 mock_mm = machine_manager.MockMachineManager( 2112 "/tmp/chromeos_root", 0, "average", "" 2113 ) 2114 mock_mm.machine_checksum_string[ 2115 "mock_label" 2116 ] = "fake_machine_checksum123" 2117 2118 self.results_cache.Init( 2119 self.mock_label.chromeos_image, 2120 self.mock_label.chromeos_root, 2121 "sunspider", 2122 1, # benchmark_run.iteration, 2123 "", # benchmark_run.test_args, 2124 "", # benchmark_run.profiler_args, 2125 mock_mm, 2126 mock_machine, 2127 self.mock_label.board, 2128 [ 2129 CacheConditions.CACHE_FILE_EXISTS, 2130 CacheConditions.CHECKSUMS_MATCH, 2131 ], 2132 self.mock_logger, 2133 "average", 2134 self.mock_label, 2135 "", # benchmark_run.share_cache 2136 "telemetry_Crosperf", 2137 True, # benchmark_run.show_all_results 2138 False, # benchmark_run.run_local 2139 "", 2140 ) # benchmark_run.cwp_dso 2141 2142 @mock.patch.object(image_checksummer.ImageChecksummer, "Checksum") 2143 def test_get_cache_dir_for_write(self, mock_checksum): 2144 def FakeGetMachines(label): 2145 if label: 2146 pass 2147 m1 = machine_manager.MockCrosMachine( 2148 "lumpy1.cros", self.results_cache.chromeos_root, "average" 2149 ) 2150 m2 = machine_manager.MockCrosMachine( 2151 "lumpy2.cros", self.results_cache.chromeos_root, "average" 2152 ) 2153 return [m1, m2] 2154 2155 mock_checksum.return_value = "FakeImageChecksumabc123" 2156 self.results_cache.machine_manager.GetMachines = FakeGetMachines 2157 self.results_cache.machine_manager.machine_checksum[ 2158 "mock_label" 2159 ] = "FakeMachineChecksumabc987" 2160 # Based on the label, benchmark and machines, get the directory in which 2161 # to store the cache information for this test run. 2162 result_path = self.results_cache.GetCacheDirForWrite() 2163 # Verify that the returned directory is correct (since the label 2164 # contained a cache_dir, named 'cache_dir', that's what is expected in 2165 # the result, rather than '~/cros_scratch'). 2166 comp_path = os.path.join( 2167 os.getcwd(), 2168 "cache_dir/54524606abaae4fdf7b02f49f7ae7127_" 2169 "sunspider_1_fda29412ceccb72977516c4785d08e2c_" 2170 "FakeImageChecksumabc123_FakeMachineChecksum" 2171 "abc987__6", 2172 ) 2173 self.assertEqual(result_path, comp_path) 2174 2175 def test_form_cache_dir(self): 2176 # This is very similar to the previous test (FormCacheDir is called 2177 # from GetCacheDirForWrite). 
2178 cache_key_list = ( 2179 "54524606abaae4fdf7b02f49f7ae7127", 2180 "sunspider", 2181 "1", 2182 "7215ee9c7d9dc229d2921a40e899ec5f", 2183 "FakeImageChecksumabc123", 2184 "*", 2185 "*", 2186 "6", 2187 ) 2188 path = self.results_cache.FormCacheDir(cache_key_list) 2189 self.assertEqual(len(path), 1) 2190 path1 = path[0] 2191 test_dirname = ( 2192 "54524606abaae4fdf7b02f49f7ae7127_sunspider_1_7215ee9" 2193 "c7d9dc229d2921a40e899ec5f_FakeImageChecksumabc123_*_*_6" 2194 ) 2195 comp_path = os.path.join(os.getcwd(), "cache_dir", test_dirname) 2196 self.assertEqual(path1, comp_path) 2197 2198 @mock.patch.object(image_checksummer.ImageChecksummer, "Checksum") 2199 def test_get_cache_key_list(self, mock_checksum): 2200 # This tests the mechanism that generates the various pieces of the 2201 # cache directory name, based on various conditions. 2202 2203 def FakeGetMachines(label): 2204 if label: 2205 pass 2206 m1 = machine_manager.MockCrosMachine( 2207 "lumpy1.cros", self.results_cache.chromeos_root, "average" 2208 ) 2209 m2 = machine_manager.MockCrosMachine( 2210 "lumpy2.cros", self.results_cache.chromeos_root, "average" 2211 ) 2212 return [m1, m2] 2213 2214 mock_checksum.return_value = "FakeImageChecksumabc123" 2215 self.results_cache.machine_manager.GetMachines = FakeGetMachines 2216 self.results_cache.machine_manager.machine_checksum[ 2217 "mock_label" 2218 ] = "FakeMachineChecksumabc987" 2219 2220 # Test 1. Generating cache name for reading (not writing). 2221 key_list = self.results_cache.GetCacheKeyList(True) 2222 self.assertEqual(key_list[0], "*") # Machine checksum value, for read. 2223 self.assertEqual(key_list[1], "sunspider") 2224 self.assertEqual(key_list[2], "1") 2225 self.assertEqual(key_list[3], "fda29412ceccb72977516c4785d08e2c") 2226 self.assertEqual(key_list[4], "FakeImageChecksumabc123") 2227 self.assertEqual(key_list[5], "*") 2228 self.assertEqual(key_list[6], "*") 2229 self.assertEqual(key_list[7], "6") 2230 2231 # Test 2. Generating cache name for writing, with local image type. 2232 key_list = self.results_cache.GetCacheKeyList(False) 2233 self.assertEqual(key_list[0], "54524606abaae4fdf7b02f49f7ae7127") 2234 self.assertEqual(key_list[1], "sunspider") 2235 self.assertEqual(key_list[2], "1") 2236 self.assertEqual(key_list[3], "fda29412ceccb72977516c4785d08e2c") 2237 self.assertEqual(key_list[4], "FakeImageChecksumabc123") 2238 self.assertEqual(key_list[5], "FakeMachineChecksumabc987") 2239 self.assertEqual(key_list[6], "") 2240 self.assertEqual(key_list[7], "6") 2241 2242 # Test 3. Generating cache name for writing, with trybot image type. 2243 self.results_cache.label.image_type = "trybot" 2244 key_list = self.results_cache.GetCacheKeyList(False) 2245 self.assertEqual(key_list[0], "54524606abaae4fdf7b02f49f7ae7127") 2246 self.assertEqual(key_list[3], "fda29412ceccb72977516c4785d08e2c") 2247 self.assertEqual(key_list[4], "54524606abaae4fdf7b02f49f7ae7127") 2248 self.assertEqual(key_list[5], "FakeMachineChecksumabc987") 2249 2250 # Test 4. Generating cache name for writing, with official image type. 
2251        self.results_cache.label.image_type = "official" 2252        key_list = self.results_cache.GetCacheKeyList(False) 2253        self.assertEqual(key_list[0], "54524606abaae4fdf7b02f49f7ae7127") 2254        self.assertEqual(key_list[1], "sunspider") 2255        self.assertEqual(key_list[2], "1") 2256        self.assertEqual(key_list[3], "fda29412ceccb72977516c4785d08e2c") 2257        self.assertEqual(key_list[4], "*") 2258        self.assertEqual(key_list[5], "FakeMachineChecksumabc987") 2259        self.assertEqual(key_list[6], "") 2260        self.assertEqual(key_list[7], "6") 2261 2262        # Test 5. Generating cache name for writing, with local image type, and 2263        # specifying that the image path must match the cached image path. 2264        self.results_cache.label.image_type = "local" 2265        self.results_cache.cache_conditions.append( 2266            CacheConditions.IMAGE_PATH_MATCH 2267        ) 2268        key_list = self.results_cache.GetCacheKeyList(False) 2269        self.assertEqual(key_list[0], "54524606abaae4fdf7b02f49f7ae7127") 2270        self.assertEqual(key_list[3], "fda29412ceccb72977516c4785d08e2c") 2271        self.assertEqual(key_list[4], "FakeImageChecksumabc123") 2272        self.assertEqual(key_list[5], "FakeMachineChecksumabc987") 2273 2274    @mock.patch.object(command_executer.CommandExecuter, "RunCommand") 2275    @mock.patch.object(os.path, "isdir") 2276    @mock.patch.object(Result, "CreateFromCacheHit") 2277    def test_read_result(self, mock_create, mock_isdir, mock_runcmd): 2278        self.fakeCacheReturnResult = None 2279 2280        def FakeGetCacheDirForRead(): 2281            return self.fakeCacheReturnResult 2282 2283        def FakeGetCacheDirForWrite(): 2284            return self.fakeCacheReturnResult 2285 2286        mock_cmd_exec = mock.Mock(spec=command_executer.CommandExecuter) 2287        fake_result = Result( 2288            self.mock_logger, self.mock_label, "average", mock_cmd_exec 2289        ) 2290        fake_result.retval = 0 2291 2292        # Set up results_cache.GetCacheDirFor{Read,Write} to return 2293        # self.fakeCacheReturnResult, which is initially None (see above). 2294        # So initially, no cache dir is returned. 2295        self.results_cache.GetCacheDirForRead = FakeGetCacheDirForRead 2296        self.results_cache.GetCacheDirForWrite = FakeGetCacheDirForWrite 2297 2298        mock_isdir.return_value = True 2299        save_cc = [ 2300            CacheConditions.CACHE_FILE_EXISTS, 2301            CacheConditions.CHECKSUMS_MATCH, 2302        ] 2303        self.results_cache.cache_conditions.append(CacheConditions.FALSE) 2304 2305        # Test 1. CacheCondition.FALSE, which means do not read from the cache 2306        # (force re-running of test). Result should be None. 2307        res = self.results_cache.ReadResult() 2308        self.assertIsNone(res) 2309        self.assertEqual(mock_runcmd.call_count, 1) 2310 2311        # Test 2. Remove CacheCondition.FALSE. Result should still be None, 2312        # because GetCacheDirForRead is returning None at the moment. 2313        mock_runcmd.reset_mock() 2314        self.results_cache.cache_conditions = save_cc 2315        res = self.results_cache.ReadResult() 2316        self.assertIsNone(res) 2317        self.assertEqual(mock_runcmd.call_count, 0) 2318 2319        # Test 3. Now set up cache dir to be returned by GetCacheDirForRead. 2320        # Since cache_dir is found, will call Result.CreateFromCacheHit, which 2321        # will actually call our mock_create and should return fake_result. 2322        self.fakeCacheReturnResult = "fake/cache/dir" 2323        mock_create.return_value = fake_result 2324        res = self.results_cache.ReadResult() 2325        self.assertEqual(mock_runcmd.call_count, 0) 2326        self.assertEqual(res, fake_result) 2327 2328        # Test 4. os.path.isdir(cache_dir) will now return false, so result 2329        # should be None again (no cache found).
2330        mock_isdir.return_value = False 2331        res = self.results_cache.ReadResult() 2332        self.assertEqual(mock_runcmd.call_count, 0) 2333        self.assertIsNone(res) 2334 2335        # Test 5. os.path.isdir returns true, but mock_create now returns None 2336        # (the call to CreateFromCacheHit returns None), so overall result is 2337        # None. 2338        mock_isdir.return_value = True 2339        mock_create.return_value = None 2340        res = self.results_cache.ReadResult() 2341        self.assertEqual(mock_runcmd.call_count, 0) 2342        self.assertIsNone(res) 2343 2344        # Test 6. Everything works 'as expected', result should be fake_result. 2345        mock_create.return_value = fake_result 2346        res = self.results_cache.ReadResult() 2347        self.assertEqual(mock_runcmd.call_count, 0) 2348        self.assertEqual(res, fake_result) 2349 2350        # Test 7. The run failed; result should be None. 2351        mock_create.return_value = fake_result 2352        fake_result.retval = 1 2353        self.results_cache.cache_conditions.append( 2354            CacheConditions.RUN_SUCCEEDED 2355        ) 2356        res = self.results_cache.ReadResult() 2357        self.assertEqual(mock_runcmd.call_count, 0) 2358        self.assertIsNone(res) 2359 2360 2361if __name__ == "__main__": 2362    unittest.main() 2363