#!/usr/bin/env python3
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Build and upload docker images to Google Container Registry per matrix."""

from __future__ import print_function

import argparse
import atexit
import multiprocessing
import os
import shutil
import subprocess
import sys
import tempfile

# Language Runtime Matrix
import client_matrix

python_util_dir = os.path.abspath(
    os.path.join(os.path.dirname(__file__), "../run_tests/python_utils")
)
sys.path.append(python_util_dir)
import dockerjob
import jobset

_IMAGE_BUILDER = "tools/run_tests/dockerize/build_interop_image.sh"
_LANGUAGES = list(client_matrix.LANG_RUNTIME_MATRIX.keys())
# All gRPC release tags, flattened, deduped and sorted.
_RELEASES = sorted(
    list(
        set(
            release
            for release_dict in list(client_matrix.LANG_RELEASE_MATRIX.values())
            for release in list(release_dict.keys())
        )
    )
)

# Destination directory inside docker image to keep extra info from build time.
_BUILD_INFO = "/var/local/build_info"

argp = argparse.ArgumentParser(
    description="Build docker images for the gRPC interop test matrix."
)
argp.add_argument(
    "--gcr_path",
    default="gcr.io/grpc-testing",
    help="Path of docker images in Google Container Registry",
)

argp.add_argument(
    "--release",
    default="master",
    choices=["all", "master"] + _RELEASES,
    help=(
        "GitHub commit tag to check out. When building all "
        'releases defined in client_matrix.py, use "all". Valid only '
        "with --git_checkout."
    ),
)

argp.add_argument(
    "-l",
    "--language",
    choices=["all"] + sorted(_LANGUAGES),
    nargs="+",
    default=["all"],
    help="Test languages to build docker images for.",
)

argp.add_argument(
    "--git_checkout",
    action="store_true",
    help=(
        "Use a separate git clone tree for building the grpc stack. "
        "Required when using the --release flag. By default, the current "
        "tree and the sibling repos will be used for building the grpc stack."
    ),
)

argp.add_argument(
    "--git_checkout_root",
    default="/export/hda3/tmp/grpc_matrix",
    help=(
        "Directory under which the grpc-go/java/main repo will be "
        "cloned. Valid only with --git_checkout."
    ),
)

argp.add_argument(
    "--keep",
    action="store_true",
    help="Keep the created local images after uploading to GCR.",
)

argp.add_argument(
    "--reuse_git_root",
    action="store_true",
    help=(
        "Reuse the repo dir. If not set, the existing git root "
        "directory will be removed before a clean checkout, because "
        "reusing the repo can cause git checkout errors if you switch "
        "between releases."
    ),
)

argp.add_argument(
    "--upload_images",
    action="store_true",
    help=(
        "If set, images will be uploaded to container registry after building."
    ),
)

args = argp.parse_args()


def add_files_to_image(image, with_files, label=None):
    """Add files to a docker image.

    image: docker image name, e.g. grpc_interop_java:26328ad8
    with_files: additional files to include in the docker image.
    label: label string to attach to the image.
    """
    tag_idx = image.find(":")
    if tag_idx == -1:
        jobset.message(
            "FAILED", "invalid docker image %s" % image, do_newline=True
        )
        sys.exit(1)
    orig_tag = "%s_" % image
    subprocess.check_output(["docker", "tag", image, orig_tag])

    lines = ["FROM " + orig_tag]
    if label:
        lines.append("LABEL %s" % label)

    temp_dir = tempfile.mkdtemp()
    atexit.register(lambda: subprocess.call(["rm", "-rf", temp_dir]))

    # Copy with_files inside the tmp directory, which will be the docker build
    # context.
    for f in with_files:
        shutil.copy(f, temp_dir)
        lines.append("COPY %s %s/" % (os.path.basename(f), _BUILD_INFO))

    # Create a Dockerfile.
    with open(os.path.join(temp_dir, "Dockerfile"), "w") as f:
        f.write("\n".join(lines))

    jobset.message("START", "Repackaging %s" % image, do_newline=True)
    build_cmd = ["docker", "build", "--rm", "--tag", image, temp_dir]
    subprocess.check_output(build_cmd)
    dockerjob.remove_image(orig_tag, skip_nonexistent=True)


def build_image_jobspec(lang, runtime, env, gcr_tag, stack_base):
    """Build an interop docker image for a language/runtime combination.

    lang: the language name, for example go.
    runtime: a <lang><version> string, for example go1.8.
    env: dictionary of environment variables passed to the build script.
    gcr_tag: the tag for the docker image (e.g. v1.3.0).
    stack_base: the local gRPC repo path.
    """
    basename = "grpc_interop_%s" % runtime
    tag = "%s/%s:%s" % (args.gcr_path, basename, gcr_tag)
    build_env = {"INTEROP_IMAGE": tag, "BASE_NAME": basename}
    build_env.update(env)
    image_builder_path = _IMAGE_BUILDER
    if client_matrix.should_build_docker_interop_image_from_release_tag(lang):
        image_builder_path = os.path.join(stack_base, _IMAGE_BUILDER)
    build_job = jobset.JobSpec(
        cmdline=[image_builder_path],
        environ=build_env,
        shortname="build_docker_%s" % runtime,
        timeout_seconds=30 * 60,
    )
    build_job.tag = tag
    return build_job


def build_all_images_for_lang(lang):
    """Build all docker images for a language across releases and runtimes."""
    if not args.git_checkout:
        if args.release != "master":
            print(
                "Cannot use --release without also enabling --git_checkout.\n"
            )
            sys.exit(1)
        releases = [args.release]
    else:
        if args.release == "all":
            releases = client_matrix.get_release_tags(lang)
        else:
            # Build a particular release.
            if args.release not in ["master"] + client_matrix.get_release_tags(
                lang
            ):
                jobset.message(
                    "SKIPPED",
                    "%s for %s is not defined" % (args.release, lang),
                    do_newline=True,
                )
                return []
            releases = [args.release]

    images = []
    for release in releases:
        images += build_all_images_for_release(lang, release)
    jobset.message(
        "SUCCESS",
        "All docker images built for %s at %s." % (lang, releases),
        do_newline=True,
    )
    return images


def build_all_images_for_release(lang, release):
    """Build all docker images for a release across all runtimes."""
    docker_images = []
    build_jobs = []

    env = {}
    # If we're not using the current tree or the sibling for the grpc stack,
    # do a checkout.
    stack_base = ""
    if args.git_checkout:
        stack_base = checkout_grpc_stack(lang, release)
        var = {
            "go": "GRPC_GO_ROOT",
            "java": "GRPC_JAVA_ROOT",
            "node": "GRPC_NODE_ROOT",
        }.get(lang, "GRPC_ROOT")
        env[var] = stack_base

    for runtime in client_matrix.get_runtimes_for_lang_release(lang, release):
        job = build_image_jobspec(lang, runtime, env, release, stack_base)
        docker_images.append(job.tag)
        build_jobs.append(job)

    jobset.message("START", "Building interop docker images.", do_newline=True)
    print("Jobs to run: \n%s\n" % "\n".join(str(j) for j in build_jobs))

    num_failures, _ = jobset.run(
        build_jobs, newline_on_success=True, maxjobs=multiprocessing.cpu_count()
    )
    if num_failures:
        jobset.message(
            "FAILED", "Failed to build interop docker images.", do_newline=True
        )
        docker_images_cleanup.extend(docker_images)
        sys.exit(1)

    jobset.message(
        "SUCCESS",
        "All docker images built for %s at %s." % (lang, release),
        do_newline=True,
    )

    if release != "master":
        commit_log = os.path.join(stack_base, "commit_log")
        if os.path.exists(commit_log):
            for image in docker_images:
                add_files_to_image(image, [commit_log], "release=%s" % release)
    return docker_images


def cleanup():
    if not args.keep:
        for image in docker_images_cleanup:
            dockerjob.remove_image(image, skip_nonexistent=True)


docker_images_cleanup = []
atexit.register(cleanup)


def maybe_apply_patches_on_git_tag(stack_base, lang, release):
    files_to_patch = []

    release_info = client_matrix.LANG_RELEASE_MATRIX[lang].get(release)
    if release_info:
        files_to_patch = release_info.patch
    if not files_to_patch:
        return
    patch_file_relative_path = "patches/%s_%s/git_repo.patch" % (lang, release)
    patch_file = os.path.abspath(
        os.path.join(os.path.dirname(__file__), patch_file_relative_path)
    )
    if not os.path.exists(patch_file):
        jobset.message(
            "FAILED", "expected patch file |%s| to exist" % patch_file
        )
        sys.exit(1)
    subprocess.check_output(
        ["git", "apply", patch_file], cwd=stack_base, stderr=subprocess.STDOUT
    )

    # TODO(jtattermusch): this really would need simplification and refactoring
    # - "git add" and "git commit" can easily be done in a single command
    # - it looks like the only reason for the existence of the "files_to_patch"
    #   entry is to perform "git add" - which is clumsy and fragile.
    # - we only allow a single patch with name "git_repo.patch". A better design
    #   would be to allow multiple patches that can have more descriptive names.
    for repo_relative_path in files_to_patch:
        subprocess.check_output(
            ["git", "add", repo_relative_path],
            cwd=stack_base,
            stderr=subprocess.STDOUT,
        )
    subprocess.check_output(
        [
            "git",
            "commit",
            "-m",
            "Hack performed on top of %s git "
            "tag in order to build and run the %s "
            "interop tests on that tag." % (lang, release),
        ],
        cwd=stack_base,
        stderr=subprocess.STDOUT,
    )


def checkout_grpc_stack(lang, release):
    """Invokes 'git checkout' for the lang/release and returns the directory created."""
    assert args.git_checkout and args.git_checkout_root

    if not os.path.exists(args.git_checkout_root):
        os.makedirs(args.git_checkout_root)

    repo = client_matrix.get_github_repo(lang)
    # Get the subdir name part of repo.
    # For example, 'git@github.com:grpc/grpc-go.git' should use 'grpc-go'.
    repo_dir = os.path.splitext(os.path.basename(repo))[0]
    stack_base = os.path.join(args.git_checkout_root, repo_dir)

    # Clean up leftover repo dir if necessary.
    if not args.reuse_git_root and os.path.exists(stack_base):
        jobset.message("START", "Removing git checkout root.", do_newline=True)
        shutil.rmtree(stack_base)

    if not os.path.exists(stack_base):
        subprocess.check_call(
            ["git", "clone", "--recursive", repo],
            cwd=os.path.dirname(stack_base),
        )

    # git checkout.
    jobset.message(
        "START",
        "git checkout %s from %s" % (release, stack_base),
        do_newline=True,
    )
    # We should NEVER do a checkout on the current tree!!!
    assert not os.path.dirname(__file__).startswith(stack_base)
    output = subprocess.check_output(
        ["git", "checkout", release], cwd=stack_base, stderr=subprocess.STDOUT
    )
    maybe_apply_patches_on_git_tag(stack_base, lang, release)
    commit_log = subprocess.check_output(["git", "log", "-1"], cwd=stack_base)
    jobset.message(
        "SUCCESS",
        "git checkout",
        "%s: %s" % (str(output), commit_log),
        do_newline=True,
    )

    # git submodule update
    jobset.message(
        "START",
        "git submodule update --init at %s from %s" % (release, stack_base),
        do_newline=True,
    )
    subprocess.check_call(
        ["git", "submodule", "update", "--init"],
        cwd=stack_base,
        stderr=subprocess.STDOUT,
    )
    jobset.message(
        "SUCCESS",
        "git submodule update --init",
        "%s: %s" % (str(output), commit_log),
        do_newline=True,
    )

    # Write git log to commit_log so it can be packaged with the docker image.
    with open(os.path.join(stack_base, "commit_log"), "wb") as f:
        f.write(commit_log)
    return stack_base


languages = args.language if args.language != ["all"] else _LANGUAGES
for lang in languages:
    docker_images = build_all_images_for_lang(lang)
    for image in docker_images:
        if args.upload_images:
            jobset.message("START", "Uploading %s" % image, do_newline=True)
            # docker image name must be in the format <gcr_path>/<image>:<gcr_tag>
            assert image.startswith(args.gcr_path) and image.find(":") != -1
            subprocess.call(["gcloud", "docker", "--", "push", image])
        else:
            # Uploading (and overwriting images) by default can easily break things.
            print(
                "Not uploading image %s, run with --upload_images to upload."
                % image
            )
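
# Example invocation (a sketch only; the script file name below is assumed,
# and the language/release values are merely illustrative):
#
#   python3 create_matrix_images.py --language go java --release all \
#       --git_checkout --upload_images
#
# Built images are tagged as <gcr_path>/grpc_interop_<runtime>:<release>,
# e.g. gcr.io/grpc-testing/grpc_interop_go1.8:v1.3.0.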