# Lint as: python2, python3
# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# GLMark outputs a final performance score, and it checks the performance score
# against minimum requirement if min_score is set.

import logging
import os
import re

from autotest_lib.client.bin import test, utils
from autotest_lib.client.common_lib import error
from autotest_lib.client.cros import service_stopper
from autotest_lib.client.cros.graphics import graphics_utils

# Matches the final score line, e.g. 'glmark2 Score: 530'; group 1 is the
# integer score.
GLMARK2_SCORE_RE = r'glmark2 Score: (\d+)'


class graphics_GLMark2(graphics_utils.GraphicsTest):
    """Runs glmark2, which benchmarks only calls compatible with OpenGL ES 2.0"""
    version = 1
    preserve_srcdir = True
    # ServiceStopper holding the stopped 'ui' service; set in initialize()
    # and restored in cleanup(). None until initialize() runs.
    _services = None

    def setup(self):
        """Registers the glmark2 dependency for installation."""
        self.job.setup_dep(['glmark2'])

    def initialize(self):
        """Stops the UI so glmark2 can own the display; restored in cleanup."""
        super(graphics_GLMark2, self).initialize()
        # If UI is running, we must stop it and restore later.
        self._services = service_stopper.ServiceStopper(['ui'])
        self._services.stop_services()

    def cleanup(self):
        """Restores any services stopped during initialize()."""
        if self._services:
            self._services.restore_services()
        super(graphics_GLMark2, self).cleanup()

    @graphics_utils.GraphicsTest.failure_report_decorator('graphics_GLMark2')
    def run_once(self, size='800x600', hasty=False, min_score=None):
        """Runs the glmark2 benchmark and checks its performance score.

        @param size: Render window size as 'WIDTHxHEIGHT' (glmark2 --size).
        @param hasty: If True, run each scene for a very short duration and
                skip the idle-CPU/cool-machine waits and score reporting.
        @param min_score: Optional minimum acceptable score; the test fails
                if the measured score is below this value.

        @raises error.TestFail: if the binary is missing, the benchmark
                reports an error, the score cannot be parsed, or the score
                is below min_score.
        """
        dep = 'glmark2'
        dep_dir = os.path.join(self.autodir, 'deps', dep)
        self.job.install_pkg(dep, 'dep', dep_dir)

        # Reuse dep_dir rather than re-deriving 'deps/glmark2' by hand, so
        # the install location and the lookup location cannot drift apart.
        glmark2 = os.path.join(dep_dir, 'glmark2')
        if not os.path.exists(glmark2):
            raise error.TestFail('Failed: Could not find test binary.')

        glmark2_data = os.path.join(dep_dir, 'data')

        options = []
        options.append('--data-path %s' % glmark2_data)
        options.append('--size %s' % size)
        options.append('--annotate')
        # Hasty runs trade measurement quality for speed (0.2s per scene).
        if hasty:
            options.append('-b :duration=0.2')
        else:
            options.append('-b :duration=2')
        cmd = glmark2 + ' ' + ' '.join(options)

        if os.environ.get('CROS_FACTORY'):
            from autotest_lib.client.cros import factory_setup_modules
            from cros.factory.test import ui
            ui.start_reposition_thread('^glmark')

        # TODO(ihf): Switch this test to use perf.PerfControl like
        # graphics_GLBench once it is stable. crbug.com/344766.
        if not hasty:
            # First wait for a fairly idle CPU; if that times out, accept a
            # slightly busier machine before giving up.
            if not utils.wait_for_idle_cpu(60.0, 0.1):
                if not utils.wait_for_idle_cpu(20.0, 0.2):
                    raise error.TestFail('Failed: Could not get idle CPU.')
            if not utils.wait_for_cool_machine():
                raise error.TestFail('Failed: Could not get cold machine.')

        # In this test we are manually handling stderr, so expected=True.
        # Strangely autotest takes CmdError/CmdTimeoutError as warning only.
        try:
            result = utils.run(cmd,
                               stderr_is_expected=True,
                               stdout_tee=utils.TEE_TO_LOGS,
                               stderr_tee=utils.TEE_TO_LOGS)
        except error.CmdError:
            raise error.TestFail('Failed: CmdError running %s' % cmd)
        except error.CmdTimeoutError:
            raise error.TestFail('Failed: CmdTimeout running %s' % cmd)

        logging.info(result)
        for line in result.stderr.splitlines():
            if line.startswith('Error:'):
                # Line already starts with 'Error: ', no need to prepend.
                raise error.TestFail(line)

        # Numbers in hasty mode are not as reliable, so don't send them to
        # the dashboard etc.
        if not hasty:
            keyvals = {}
            score = None
            # glmark2 outputs the final performance score as:
            # glmark2 Score: 530
            for line in result.stdout.splitlines():
                match = re.search(GLMARK2_SCORE_RE, line)
                if match:
                    score = int(match.group(1))
            # Use 'is None' so a legitimately parsed score of 0 is not
            # misreported as a parse failure; min_score below handles
            # unacceptably low scores.
            if score is None:
                raise error.TestFail('Failed: Unable to read benchmark score')
            # Output numbers for plotting by harness.
            logging.info('GLMark2 score: %d', score)
            if os.environ.get('CROS_FACTORY'):
                from autotest_lib.client.cros import factory_setup_modules
                from cros.factory.event_log import EventLog
                EventLog('graphics_GLMark2').Log('glmark2_score', score=score)
            keyvals['glmark2_score'] = score
            self.write_perf_keyval(keyvals)
            self.output_perf_value(
                description='Score',
                value=score,
                units='score',
                higher_is_better=True)

            if min_score is not None and score < min_score:
                raise error.TestFail(
                    'Failed: Benchmark score %d < %d (minimum score '
                    'requirement)' % (score, min_score))