#!/usr/bin/env vpython3
#
# Copyright 2015 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Install *_incremental.apk targets as well as their dependent files."""

import argparse
import collections
import functools
import glob
import hashlib
import json
import logging
import os
import posixpath
import shutil
import sys

sys.path.append(
    os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
import devil_chromium
from devil.android import apk_helper
from devil.android import device_utils
from devil.utils import reraiser_thread
from devil.utils import run_tests_helper
from pylib import constants
from pylib.utils import time_profile

prev_sys_path = list(sys.path)
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir, 'gyp'))
import dex
from util import build_utils
sys.path = prev_sys_path


_R8_PATH = os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'r8', 'lib',
                        'r8.jar')
_SHARD_JSON_FILENAME = 'shards.json'


def _DeviceCachePath(device):
  file_name = 'device_cache_%s.json' % device.adb.GetDeviceSerial()
  return os.path.join(constants.GetOutDirectory(), file_name)


def _Execute(concurrently, *funcs):
  """Calls all functions in |funcs| concurrently or in sequence."""
  timer = time_profile.TimeProfile()
  if concurrently:
    reraiser_thread.RunAsync(funcs)
  else:
    for f in funcs:
      f()
  timer.Stop(log=False)
  return timer


def _GetDeviceIncrementalDir(package):
  """Returns the device path to put incremental files for the given package."""
  return '/data/local/tmp/incremental-app-%s' % package


def _IsStale(src_paths, old_src_paths, dest_path):
  """Returns whether |dest_path| is missing or older than any of |src_paths|."""
  if not os.path.exists(dest_path):
    return True
  # Always mark as stale if any paths were added or removed.
  if set(src_paths) != set(old_src_paths):
    return True
  dest_time = os.path.getmtime(dest_path)
  for path in src_paths:
    if os.path.getmtime(path) > dest_time:
      return True
  return False


def _LoadPrevShards(dex_staging_dir):
  shards_json_path = os.path.join(dex_staging_dir, _SHARD_JSON_FILENAME)
  if not os.path.exists(shards_json_path):
    return {}
  with open(shards_json_path) as f:
    return json.load(f)


def _SaveNewShards(shards, dex_staging_dir):
  shards_json_path = os.path.join(dex_staging_dir, _SHARD_JSON_FILENAME)
  with open(shards_json_path, 'w') as f:
    json.dump(shards, f)


def _AllocateDexShards(dex_files):
  """Divides input dex files into buckets."""
  # Goals:
  # * Make shards small enough that they are fast to merge.
  # * Minimize the number of shards so they load quickly on device.
  # * Partition files into shards such that a change in one file results in only
  #   one shard having to be re-created.
  shards = collections.defaultdict(list)
  # As of Oct 2019, 10 shards results in a min/max size of 582K/2.6M.
  NUM_CORE_SHARDS = 10
  # As of Oct 2019, 17 dex files are larger than 1M.
  SHARD_THRESHOLD = 2**20
  for src_path in dex_files:
    if os.path.getsize(src_path) >= SHARD_THRESHOLD:
      # Use the path as the name rather than an incrementing number to ensure
      # that it shards to the same name every time.
      name = os.path.relpath(src_path, constants.GetOutDirectory()).replace(
          os.sep, '.')
      shards[name].append(src_path)
    else:
      # The stdlib hash(string) function is salted differently across python3
      # invocations. Thus we use md5 instead to consistently shard the same
      # file to the same shard across runs.
      hex_hash = hashlib.md5(src_path.encode('utf-8')).hexdigest()
      name = 'shard{}.dex.jar'.format(int(hex_hash, 16) % NUM_CORE_SHARDS)
      shards[name].append(src_path)
  logging.info('Sharding %d dex files into %d buckets', len(dex_files),
               len(shards))
  return shards


def _CreateDexFiles(shards, prev_shards, dex_staging_dir, min_api,
                    use_concurrency):
  """Creates dex files within |dex_staging_dir| defined by |shards|."""
  tasks = []
  for name, src_paths in shards.items():
    dest_path = os.path.join(dex_staging_dir, name)
    if _IsStale(src_paths=src_paths,
                old_src_paths=prev_shards.get(name, []),
                dest_path=dest_path):
      tasks.append(
          functools.partial(dex.MergeDexForIncrementalInstall, _R8_PATH,
                            src_paths, dest_path, min_api))

  # TODO(agrieve): It would be more performant to write a custom d8.jar
  # wrapper in java that would process these in bulk, rather than spinning
  # up a new process for each one.
  _Execute(use_concurrency, *tasks)

  # Remove any stale shards.
  for name in os.listdir(dex_staging_dir):
    if name not in shards:
      os.unlink(os.path.join(dex_staging_dir, name))


def Uninstall(device, package, enable_device_cache=False):
  """Uninstalls and removes all incremental files for the given package."""
  main_timer = time_profile.TimeProfile()
  device.Uninstall(package)
  if enable_device_cache:
    # Uninstall is rare, so just wipe the cache in this case.
    cache_path = _DeviceCachePath(device)
    if os.path.exists(cache_path):
      os.unlink(cache_path)
  device.RunShellCommand(['rm', '-rf', _GetDeviceIncrementalDir(package)],
                         check_return=True)
  logging.info('Uninstall took %s seconds.', main_timer.GetDelta())


def Install(device, install_json, apk=None, enable_device_cache=False,
            use_concurrency=True, permissions=()):
  """Installs the given incremental apk and all required supporting files.

  Args:
    device: A DeviceUtils instance (to install to).
    install_json: Path to .json file or already parsed .json object.
    apk: An existing ApkHelper instance for the apk (optional).
    enable_device_cache: Whether to enable on-device caching of checksums.
    use_concurrency: Whether to speed things up using multiple threads.
    permissions: A list of the permissions to grant, or None to grant all
                 non-denylisted permissions in the manifest.
  """
  if isinstance(install_json, str):
    with open(install_json) as f:
      install_dict = json.load(f)
  else:
    install_dict = install_json

  main_timer = time_profile.TimeProfile()
  install_timer = time_profile.TimeProfile()
  push_native_timer = time_profile.TimeProfile()
  merge_dex_timer = time_profile.TimeProfile()
  push_dex_timer = time_profile.TimeProfile()

  def fix_path(p):
    return os.path.normpath(os.path.join(constants.GetOutDirectory(), p))

  if not apk:
    apk = apk_helper.ToHelper(fix_path(install_dict['apk_path']))
  split_globs = [fix_path(p) for p in install_dict['split_globs']]
  native_libs = [fix_path(p) for p in install_dict['native_libs']]
  dex_files = [fix_path(p) for p in install_dict['dex_files']]
  show_proguard_warning = install_dict.get('show_proguard_warning')

  apk_package = apk.GetPackageName()
  device_incremental_dir = _GetDeviceIncrementalDir(apk_package)
  dex_staging_dir = os.path.join(constants.GetOutDirectory(),
                                 'incremental-install',
                                 install_dict['apk_path'])
  device_dex_dir = posixpath.join(device_incremental_dir, 'dex')

  # Install .apk(s) if any of them have changed.
  def do_install():
    install_timer.Start()
    if split_globs:
      splits = []
      for split_glob in split_globs:
        splits.extend(glob.glob(split_glob))
      device.InstallSplitApk(
          apk,
          splits,
          allow_downgrade=True,
          reinstall=True,
          allow_cached_props=True,
          permissions=permissions)
    else:
      device.Install(
          apk, allow_downgrade=True, reinstall=True, permissions=permissions)
    install_timer.Stop(log=False)

  # Push .so and .dex files to the device (if they have changed).
  def do_push_files():

    def do_push_native():
      push_native_timer.Start()
      if native_libs:
        with build_utils.TempDir() as temp_dir:
          device_lib_dir = posixpath.join(device_incremental_dir, 'lib')
          for path in native_libs:
            # Note: Can't use symlinks as they don't work when
            # "adb push parent_dir" is used (like we do here).
            shutil.copy(path, os.path.join(temp_dir, os.path.basename(path)))
          device.PushChangedFiles([(temp_dir, device_lib_dir)],
                                  delete_device_stale=True)
      push_native_timer.Stop(log=False)

    def do_merge_dex():
      merge_dex_timer.Start()
      prev_shards = _LoadPrevShards(dex_staging_dir)
      shards = _AllocateDexShards(dex_files)
      build_utils.MakeDirectory(dex_staging_dir)
      _CreateDexFiles(shards, prev_shards, dex_staging_dir,
                      apk.GetMinSdkVersion(), use_concurrency)
      # New shard information must be saved after _CreateDexFiles since
      # _CreateDexFiles removes all non-dex files from the staging dir.
      _SaveNewShards(shards, dex_staging_dir)
      merge_dex_timer.Stop(log=False)

    def do_push_dex():
      push_dex_timer.Start()
      device.PushChangedFiles([(dex_staging_dir, device_dex_dir)],
                              delete_device_stale=True)
      push_dex_timer.Stop(log=False)

    _Execute(use_concurrency, do_push_native, do_merge_dex)
    do_push_dex()

  cache_path = _DeviceCachePath(device)
  def restore_cache():
    if not enable_device_cache:
      return
    if os.path.exists(cache_path):
      logging.info('Using device cache: %s', cache_path)
      with open(cache_path) as f:
        device.LoadCacheData(f.read())
      # Delete the cached file so that any exceptions cause it to be cleared.
      os.unlink(cache_path)
    else:
      logging.info('No device cache present: %s', cache_path)

  def save_cache():
    if not enable_device_cache:
      return
    with open(cache_path, 'w') as f:
      f.write(device.DumpCacheData())
      logging.info('Wrote device cache: %s', cache_path)

  # Create 2 lock files:
  # * install.lock tells the app to pause on start-up (until we release it).
  # * firstrun.lock is used by the app to pause all secondary processes until
  #   the primary process finishes loading the .dex / .so files.
  def create_lock_files():
    # Creates or zeros out lock files.
    cmd = ('D="%s";'
           'mkdir -p $D &&'
           'echo -n >$D/install.lock 2>$D/firstrun.lock')
    device.RunShellCommand(
        cmd % device_incremental_dir, shell=True, check_return=True)

  # The firstrun.lock is released by the app itself.
  def release_installer_lock():
    device.RunShellCommand('echo > %s/install.lock' % device_incremental_dir,
                           check_return=True, shell=True)

  # Concurrency here speeds things up quite a bit, but DeviceUtils hasn't
  # been designed for multi-threading. Enabling only because this is a
  # developer-only tool.
  setup_timer = _Execute(use_concurrency, create_lock_files, restore_cache)

  _Execute(use_concurrency, do_install, do_push_files)

  finalize_timer = _Execute(use_concurrency, release_installer_lock, save_cache)

  logging.info(
      'Install of %s took %s seconds (setup=%s, install=%s, lib_push=%s, '
      'dex_merge=%s, dex_push=%s, finalize=%s)', os.path.basename(apk.path),
      main_timer.GetDelta(), setup_timer.GetDelta(), install_timer.GetDelta(),
      push_native_timer.GetDelta(), merge_dex_timer.GetDelta(),
      push_dex_timer.GetDelta(), finalize_timer.GetDelta())
  if show_proguard_warning:
    logging.warning('Target had proguard enabled, but incremental install uses '
                    'non-proguarded .dex files. Performance characteristics '
                    'may differ.')


def main():
  parser = argparse.ArgumentParser()
  parser.add_argument('json_path',
                      help='The path to the generated incremental apk .json.')
  parser.add_argument('-d', '--device', dest='device',
                      help='Target device to install the apk on.')
  parser.add_argument('--uninstall',
                      action='store_true',
                      default=False,
                      help='Remove the app and all side-loaded files.')
  parser.add_argument('--output-directory',
                      help='Path to the root build directory.')
  parser.add_argument('--no-threading',
                      action='store_false',
                      default=True,
                      dest='threading',
                      help='Do not install and push concurrently.')
  parser.add_argument('--no-cache',
                      action='store_false',
                      default=True,
                      dest='cache',
                      help='Do not use cached information about what files are '
                      'currently on the target device.')
  parser.add_argument('-v',
                      '--verbose',
                      dest='verbose_count',
                      default=0,
                      action='count',
                      help='Verbose level (multiple times for more)')

  args = parser.parse_args()

  run_tests_helper.SetLogLevel(args.verbose_count)
  if args.output_directory:
    constants.SetOutputDirectory(args.output_directory)

  devil_chromium.Initialize(output_directory=constants.GetOutDirectory())

  # Retries are annoying when commands fail for legitimate reasons. Might want
  # to enable them if this is ever used on bots though.
  device = device_utils.DeviceUtils.HealthyDevices(
      device_arg=args.device,
      default_retries=0,
      enable_device_files_cache=True)[0]

  if args.uninstall:
    with open(args.json_path) as f:
      install_dict = json.load(f)
    apk = apk_helper.ToHelper(install_dict['apk_path'])
    Uninstall(device, apk.GetPackageName(), enable_device_cache=args.cache)
  else:
    Install(device, args.json_path, enable_device_cache=args.cache,
            use_concurrency=args.threading)


if __name__ == '__main__':
  sys.exit(main())