xref: /aosp_15_r20/external/skia/infra/bots/gen_tasks_logic/gen_tasks_logic.go (revision c8dee2aa9b3f27cf6c858bd81872bdeb2c07ed17)
1// Copyright 2016 The Chromium Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5package gen_tasks_logic
6
7/*
8	Generate the tasks.json file.
9*/
10
11import (
12	"encoding/json"
13	"fmt"
14	"io/ioutil"
15	"log"
16	"path"
17	"path/filepath"
18	"regexp"
19	"runtime"
20	"sort"
21	"strconv"
22	"strings"
23	"time"
24
25	"go.skia.org/infra/go/cas/rbe"
26	"go.skia.org/infra/go/cipd"
27	"go.skia.org/infra/task_scheduler/go/specs"
28	"go.skia.org/skia/bazel/device_specific_configs"
29)
30
const (
	// Names of CAS (content-addressed storage) input specs. Each name is
	// registered with the TasksCfgBuilder via MustAddCasSpec in GenTasks.
	CAS_BAZEL         = "bazel"
	CAS_CANVASKIT     = "canvaskit"
	CAS_COMPILE       = "compile"
	CAS_EMPTY         = "empty" // TODO(borenet): It'd be nice if this wasn't necessary.
	CAS_LOTTIE_CI     = "lottie-ci"
	CAS_LOTTIE_WEB    = "lottie-web"
	CAS_PATHKIT       = "pathkit"
	CAS_PERF          = "perf"
	CAS_PUPPETEER     = "puppeteer"
	CAS_RUN_RECIPE    = "run-recipe"
	CAS_RECIPES       = "recipes"
	CAS_RECREATE_SKPS = "recreate-skps"
	CAS_SKOTTIE_WASM  = "skottie-wasm"
	CAS_TASK_DRIVERS  = "task-drivers"
	CAS_TEST          = "test"
	CAS_WASM_GM       = "wasm-gm"
	CAS_WHOLE_REPO    = "whole-repo"

	// Names (or name prefixes) of per-commit Housekeeper tasks which build
	// or isolate inputs consumed by other tasks.
	BUILD_TASK_DRIVERS_PREFIX  = "Housekeeper-PerCommit-BuildTaskDrivers"
	BUNDLE_RECIPES_NAME        = "Housekeeper-PerCommit-BundleRecipes"
	ISOLATE_GCLOUD_LINUX_NAME  = "Housekeeper-PerCommit-IsolateGCloudLinux"
	ISOLATE_SKIMAGE_NAME       = "Housekeeper-PerCommit-IsolateSkImage"
	ISOLATE_SKP_NAME           = "Housekeeper-PerCommit-IsolateSKP"
	ISOLATE_MSKP_NAME          = "Housekeeper-PerCommit-IsolateMSKP"
	ISOLATE_SVG_NAME           = "Housekeeper-PerCommit-IsolateSVG"
	ISOLATE_NDK_LINUX_NAME     = "Housekeeper-PerCommit-IsolateAndroidNDKLinux"
	ISOLATE_SDK_LINUX_NAME     = "Housekeeper-PerCommit-IsolateAndroidSDKLinux"
	ISOLATE_WIN_TOOLCHAIN_NAME = "Housekeeper-PerCommit-IsolateWinToolchain"

	// OS strings used both as Swarming "os" dimension values and (the
	// COMPILE_TASK_NAME_* variants) as the "os" part of compile task names.
	DEBIAN_11_OS                   = "Debian-11.5"
	DEFAULT_OS_DEBIAN              = "Debian-10.10"
	DEFAULT_OS_LINUX_GCE           = "Debian-10.3"
	OLD_OS_LINUX_GCE               = "Debian-9.8"
	COMPILE_TASK_NAME_OS_LINUX     = "Debian10"
	COMPILE_TASK_NAME_OS_LINUX_OLD = "Debian9"
	DEFAULT_OS_MAC                 = "Mac-14.5"
	DEFAULT_OS_WIN_GCE             = "Windows-Server-17763"

	// GCE machine types, used as the "machine_type" Swarming dimension for
	// Linux GCE bots (see linuxGceDimensions).

	// Small is a 2-core machine.
	// TODO(dogben): Would n1-standard-1 or n1-standard-2 be sufficient?
	MACHINE_TYPE_SMALL = "n1-highmem-2"
	// Medium is a 16-core machine.
	MACHINE_TYPE_MEDIUM = "n1-standard-16"
	// Large is a 64-core machine. (We use "highcpu" because we don't need more than 57GB memory for
	// any of our tasks.)
	MACHINE_TYPE_LARGE = "n1-highcpu-64"

	// Swarming output dirs.
	OUTPUT_NONE          = "output_ignored" // This will result in outputs not being isolated.
	OUTPUT_BUILD         = "build"
	OUTPUT_BUILD_NOPATCH = "build_nopatch"
	OUTPUT_TEST          = "test"
	OUTPUT_PERF          = "perf"
	OUTPUT_BAZEL         = "bazel_output"

	// Name prefix for upload jobs.
	PREFIX_UPLOAD = "Upload"

	// This will have to be kept in sync with the kMin_Version in
	// src/core/SkPicturePriv.h
	// See the comment in that file on how to find the version to use here.
	oldestSupportedSkpVersion = 293

	// bazelCacheDirOnGCELinux is the path where Bazel should write its cache on Linux GCE machines.
	// The Bazel cache can grow large (>10GB), so this should be in a partition with enough free
	// space. On Linux GCE machines, the partition mounted at /mnt/pd0 is significantly larger than
	// the partition mounted at /.
	bazelCacheDirOnGCELinux = "/mnt/pd0/bazel_cache"

	// bazelCacheDirOnSkoloLinux is like bazelCacheDirOnGCELinux for Skolo Linux machines. Unlike GCE
	// Linux machines, the partition mounted at / on Skolo Linux machines is large enough. While
	// using the default Bazel cache path would work, our Bazel task drivers demand an explicit path.
	// We store the Bazel cache at /home/chrome-bot/bazel_cache rather than on the default location
	// of /home/chrome-bot/cache/.bazel to make it obvious to someone examining a Skolo machine that
	// we are overriding the default location.
	bazelCacheDirOnSkoloLinux = "/home/chrome-bot/bazel_cache"

	// bazelCacheDirOnWindows is like bazelCacheDirOnSkoloLinux. Unlike GCE Linux machines, we only
	// have a single partition. While using the default cache path would work, our Bazel task
	// drivers demand an explicit path. We store the Bazel cache at /home/chrome-bot/bazel_cache
	// rather than on the default location of %APPDATA% to make it obvious to someone examining a
	// Skolo machine that we are overriding the default location. Note that double-escaping the
	// path separator is necessary because this string is passed to Bazel via multiple levels of
	// subprocesses.
	bazelCacheDirOnWindows = `C:\\Users\\chrome-bot\\bazel_cache`
)
118
var (
	// "Constants"

	// Named caches used by tasks.
	CACHES_GIT = []*specs.Cache{
		{
			Name: "git",
			Path: "cache/git",
		},
		{
			Name: "git_cache",
			Path: "cache/git_cache",
		},
	}
	CACHES_GO = []*specs.Cache{
		{
			Name: "go_cache",
			Path: "cache/go_cache",
		},
		{
			Name: "gopath",
			Path: "cache/gopath",
		},
	}
	CACHES_WORKDIR = []*specs.Cache{
		{
			Name: "work",
			Path: "cache/work",
		},
	}
	CACHES_CCACHE = []*specs.Cache{
		{
			Name: "ccache",
			Path: "cache/ccache",
		},
	}
	// The "docker" cache is used as a persistent working directory for
	// tasks which use Docker. It is not to be confused with Docker's own
	// cache, which stores images. We do not currently use a named Swarming
	// cache for the latter.
	// TODO(borenet): We should ensure that any task which uses Docker does
	// not also use the normal "work" cache, to prevent issues like
	// https://bugs.chromium.org/p/skia/issues/detail?id=9749.
	CACHES_DOCKER = []*specs.Cache{
		{
			Name: "docker",
			Path: "cache/docker",
		},
	}

	// CAS_SPEC_LOTTIE_CI is a CasSpec which includes the files needed for
	// lottie-ci.  This is global so that it can be overridden by other
	// repositories which import this file.
	CAS_SPEC_LOTTIE_CI = &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython3",
			"skia/infra/bots/run_recipe.py",
			"skia/infra/lottiecap",
			"skia/tools/lottie-web-perf",
			"skia/tools/lottiecap",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	}

	// CAS_SPEC_WHOLE_REPO is a CasSpec which includes the entire repo. This is
	// global so that it can be overridden by other repositories which import
	// this file.
	CAS_SPEC_WHOLE_REPO = &specs.CasSpec{
		Root:     "..",
		Paths:    []string{"skia"},
		Excludes: []string{rbe.ExcludeGitDir},
	}

	// TODO(borenet): This is hacky and bad.
	CIPD_PKG_LUCI_AUTH = cipd.MustGetPackage("infra/tools/luci-auth/${platform}")

	CIPD_PKGS_GOLDCTL = cipd.MustGetPackage("skia/tools/goldctl/${platform}")

	CIPD_PKGS_XCODE = []*specs.CipdPackage{
		// https://chromium.googlesource.com/chromium/tools/build/+/e19b7d9390e2bb438b566515b141ed2b9ed2c7c2/scripts/slave/recipe_modules/ios/api.py#317
		// This package is really just an installer for XCode.
		{
			Name: "infra/tools/mac_toolchain/${platform}",
			Path: "mac_toolchain",
			// When this is updated, also update
			// https://skia.googlesource.com/skcms.git/+/f1e2b45d18facbae2dece3aca673fe1603077846/infra/bots/gen_tasks.go#56
			Version: "git_revision:e6f45bde6c5ee56924b1f905159b6a1a48ef25dd",
		},
	}

	// These properties are required by some tasks, eg. for running
	// bot_update, but they prevent de-duplication, so they should only be
	// used where necessary.
	EXTRA_PROPS = map[string]string{
		"buildbucket_build_id": specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID,
		"patch_issue":          specs.PLACEHOLDER_ISSUE_INT,
		"patch_ref":            specs.PLACEHOLDER_PATCH_REF,
		"patch_repo":           specs.PLACEHOLDER_PATCH_REPO,
		"patch_set":            specs.PLACEHOLDER_PATCHSET_INT,
		"patch_storage":        specs.PLACEHOLDER_PATCH_STORAGE,
		"repository":           specs.PLACEHOLDER_REPO,
		"revision":             specs.PLACEHOLDER_REVISION,
		"task_id":              specs.PLACEHOLDER_TASK_ID,
	}

	// ISOLATE_ASSET_MAPPING maps the name of an asset to the configuration
	// for how the CIPD package should be installed for a given task.
	ISOLATE_ASSET_MAPPING = map[string]uploadAssetCASCfg{
		"gcloud_linux": {
			uploadTaskName: ISOLATE_GCLOUD_LINUX_NAME,
			path:           "gcloud_linux",
		},
		"skimage": {
			uploadTaskName: ISOLATE_SKIMAGE_NAME,
			path:           "skimage",
		},
		"skp": {
			uploadTaskName: ISOLATE_SKP_NAME,
			path:           "skp",
		},
		"svg": {
			uploadTaskName: ISOLATE_SVG_NAME,
			path:           "svg",
		},
		"mskp": {
			uploadTaskName: ISOLATE_MSKP_NAME,
			path:           "mskp",
		},
		"android_ndk_linux": {
			uploadTaskName: ISOLATE_NDK_LINUX_NAME,
			path:           "android_ndk_linux",
		},
		"android_sdk_linux": {
			uploadTaskName: ISOLATE_SDK_LINUX_NAME,
			path:           "android_sdk_linux",
		},
		"win_toolchain": {
			alwaysIsolate:  true,
			uploadTaskName: ISOLATE_WIN_TOOLCHAIN_NAME,
			path:           "win_toolchain",
		},
	}

	// Set dontReduceOpsTaskSplitting option on these models.
	DONT_REDUCE_OPS_TASK_SPLITTING_MODELS = []string{
		"NUC5PPYH",
	}
)
268
// Config contains general configuration information. It is typically loaded
// from a cfg.json file which lives next to the calling gen_tasks.go file; see
// LoadConfig and GenTasks.
type Config struct {
	// Directory containing assets. Assumed to be relative to the directory
	// which contains the calling gen_tasks.go file. If not specified, uses
	// the infra/bots/assets from this repo.
	AssetsDir string `json:"assets_dir"`

	// Path to the builder name schema JSON file. Assumed to be relative to
	// the directory which contains the calling gen_tasks.go file. If not
	// specified, uses infra/bots/recipe_modules/builder_name_schema/builder_name_schema.json
	// from this repo.
	BuilderNameSchemaFile string `json:"builder_name_schema"`

	// URL of the Skia Gold known hashes endpoint.
	GoldHashesURL string `json:"gold_hashes_url"`

	// GCS bucket used for GM results.
	GsBucketGm string `json:"gs_bucket_gm"`

	// GCS bucket used for Nanobench results.
	GsBucketNano string `json:"gs_bucket_nano"`

	// Optional function which returns a bot ID for internal devices.
	// Not serializable, hence excluded from JSON.
	InternalHardwareLabel func(parts map[string]string) *int `json:"-"`

	// List of task names for which we'll never upload results.
	NoUpload []string `json:"no_upload"`

	// PathToSkia is the relative path from the root of the current checkout to
	// the root of the Skia checkout.
	PathToSkia string `json:"path_to_skia"`

	// Swarming pool used for triggering tasks.
	Pool string `json:"pool"`

	// LUCI project associated with this repo.
	Project string `json:"project"`

	// Service accounts.
	ServiceAccountCanary       string `json:"service_account_canary"`
	ServiceAccountCompile      string `json:"service_account_compile"`
	ServiceAccountHousekeeper  string `json:"service_account_housekeeper"`
	ServiceAccountRecreateSKPs string `json:"service_account_recreate_skps"`
	ServiceAccountUploadBinary string `json:"service_account_upload_binary"`
	ServiceAccountUploadGM     string `json:"service_account_upload_gm"`
	ServiceAccountUploadNano   string `json:"service_account_upload_nano"`

	// Optional override function which derives Swarming bot dimensions
	// from parts of task names. Not serializable, hence excluded from JSON.
	SwarmDimensions func(parts map[string]string) []string `json:"-"`
}
320
// JobInfo is the type of each entry in the jobs.json file.
type JobInfo struct {
	// The name of the job.
	Name string `json:"name"`

	// The optional CQ config of this job. If the CQ config is missing then the
	// job will not be added to the CQ of this branch.
	CQConfig *specs.CommitQueueJobConfig `json:"cq_config,omitempty"`
}
330
// LoadConfig loads the Config from a cfg.json file which is the sibling of the
// calling gen_tasks.go file. Any failure to read or parse the file is fatal.
//
// NOTE: this must be called directly from gen_tasks.go, because
// getCallingDirName inspects the call stack at a fixed depth to locate the
// caller's directory.
func LoadConfig() *Config {
	cfgDir := getCallingDirName()
	var cfg Config
	LoadJson(filepath.Join(cfgDir, "cfg.json"), &cfg)
	return &cfg
}
339
340// CheckoutRoot is a wrapper around specs.GetCheckoutRoot which prevents the
341// caller from needing a dependency on the specs package.
342func CheckoutRoot() string {
343	root, err := specs.GetCheckoutRoot()
344	if err != nil {
345		log.Fatal(err)
346	}
347	return root
348}
349
// LoadJson loads JSON from the given file and unmarshals it into the given
// destination. Any failure to read or parse the file is fatal.
func LoadJson(filename string, dest interface{}) {
	contents, err := ioutil.ReadFile(filename)
	if err != nil {
		log.Fatalf("Unable to read %q: %s", filename, err)
	}
	if err = json.Unmarshal(contents, dest); err != nil {
		log.Fatalf("Unable to parse %q: %s", filename, err)
	}
}
361
// In reports whether the string s appears in the slice a.
// TODO(borenet): This is copied from go.skia.org/infra/go/util to avoid the
// huge set of additional dependencies added by that package.
func In(s string, a []string) bool {
	for i := range a {
		if a[i] == s {
			return true
		}
	}
	return false
}
373
// GenTasks regenerates the tasks.json file. Loads the job list from a jobs.json
// file which is the sibling of the calling gen_tasks.go file. If cfg is nil, it
// is similarly loaded from a cfg.json file which is the sibling of the calling
// gen_tasks.go file.
//
// NOTE: this must be called directly from gen_tasks.go, because
// getCallingDirName inspects the call stack at a fixed depth to locate the
// caller's directory.
func GenTasks(cfg *Config) {
	b := specs.MustNewTasksCfgBuilder()

	// Find the paths to the infra/bots directories in this repo and the
	// repo of the calling file.
	relpathTargetDir := getThisDirName()
	relpathBaseDir := getCallingDirName()

	// Parse jobs.json.
	var jobsWithInfo []*JobInfo
	LoadJson(filepath.Join(relpathBaseDir, "jobs.json"), &jobsWithInfo)
	// Create a slice with only job names.
	jobs := []string{}
	for _, j := range jobsWithInfo {
		jobs = append(jobs, j.Name)
	}

	// Fall back to loading the config from the caller's cfg.json.
	if cfg == nil {
		cfg = new(Config)
		LoadJson(filepath.Join(relpathBaseDir, "cfg.json"), cfg)
	}

	// Create the JobNameSchema. The schema file may be overridden by the
	// caller's config; otherwise use the one from this repo.
	builderNameSchemaFile := filepath.Join(relpathTargetDir, "recipe_modules", "builder_name_schema", "builder_name_schema.json")
	if cfg.BuilderNameSchemaFile != "" {
		builderNameSchemaFile = filepath.Join(relpathBaseDir, cfg.BuilderNameSchemaFile)
	}
	schema, err := NewJobNameSchema(builderNameSchemaFile)
	if err != nil {
		log.Fatal(err)
	}

	// Set the assets dir. Like the schema file, it may be overridden by the
	// caller's config.
	assetsDir := filepath.Join(relpathTargetDir, "assets")
	if cfg.AssetsDir != "" {
		assetsDir = filepath.Join(relpathBaseDir, cfg.AssetsDir)
	}
	b.SetAssetsDir(assetsDir)

	// Create Tasks and Jobs.
	builder := &builder{
		TasksCfgBuilder: b,
		cfg:             cfg,
		jobNameSchema:   schema,
		jobs:            jobs,
	}
	for _, j := range jobsWithInfo {
		jb := newJobBuilder(builder, j.Name)
		jb.genTasksForJob()
		jb.finish()

		// Add the CQ spec if it is a CQ job.
		if j.CQConfig != nil {
			b.MustAddCQJob(j.Name, j.CQConfig)
		}
	}

	// Create CasSpecs: the sets of input files uploaded to CAS for each
	// category of task.
	b.MustAddCasSpec(CAS_BAZEL, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			// Source code.
			"skia/example",
			"skia/experimental/rust_png",
			"skia/include",
			"skia/modules",
			"skia/src",
			"skia/tests",
			"skia/third_party",
			"skia/tools",
			// Needed for tests.
			"skia/bench", // Needed to run benchmark tests with Bazel.
			"skia/gm",    // Needed to run GMs with Bazel.
			"skia/gn",    // Some Python scripts still live here.
			"skia/resources",
			"skia/package.json",
			"skia/package-lock.json",
			"skia/DEPS",   // Needed to check generation.
			"skia/infra",  // Many Go tests and Bazel tools live here.
			"skia/go.mod", // Needed by Gazelle.
			"skia/go.sum", // Needed by Gazelle.
			// Needed to run Bazel.
			"skia/.bazelignore",
			"skia/.bazelrc",
			"skia/.bazelversion",
			"skia/BUILD.bazel",
			"skia/LICENSE", // Referred to by default_applicable_licenses
			"skia/WORKSPACE.bazel",
			"skia/bazel",
			"skia/go_repositories.bzl",
			"skia/requirements.txt",
			"skia/toolchain",
		},
		Excludes: []string{
			rbe.ExcludeGitDir,
			"skia/third_party/externals",
		},
	})
	b.MustAddCasSpec(CAS_CANVASKIT, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython3",
			"skia/infra/bots/run_recipe.py",
			"skia/infra/canvaskit",
			"skia/modules/canvaskit",
			"skia/modules/pathkit/perf/perfReporter.js",
			"skia/modules/pathkit/tests/testReporter.js",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_EMPTY, specs.EmptyCasSpec)
	b.MustAddCasSpec(CAS_LOTTIE_CI, CAS_SPEC_LOTTIE_CI)
	b.MustAddCasSpec(CAS_LOTTIE_WEB, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython3",
			"skia/infra/bots/run_recipe.py",
			"skia/tools/lottie-web-perf",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_PATHKIT, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython3",
			"skia/infra/bots/run_recipe.py",
			"skia/infra/pathkit",
			"skia/modules/pathkit",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_PERF, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython3",
			"skia/infra/bots/assets",
			"skia/infra/bots/run_recipe.py",
			"skia/platform_tools/ios/bin",
			"skia/resources",
			"skia/tools/valgrind.supp",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_PUPPETEER, &specs.CasSpec{
		Root: "../skia", // Needed for other repos.
		Paths: []string{
			".vpython3",
			"tools/perf-canvaskit-puppeteer",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_RECIPES, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython3",
			"skia/infra/config/recipes.cfg",
			"skia/infra/bots/bundle_recipes.sh",
			"skia/infra/bots/README.recipes.md",
			"skia/infra/bots/recipe_modules",
			"skia/infra/bots/recipes",
			"skia/infra/bots/recipes.py",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_RUN_RECIPE, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython3",
			"skia/infra/bots/run_recipe.py",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_SKOTTIE_WASM, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython3",
			"skia/infra/bots/run_recipe.py",
			"skia/tools/skottie-wasm-perf",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_TASK_DRIVERS, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			// Deps needed to use Bazel
			"skia/.bazelrc",
			"skia/.bazelversion",
			"skia/BUILD.bazel",
			"skia/LICENSE",
			"skia/WORKSPACE.bazel",
			"skia/bazel",
			"skia/go_repositories.bzl",
			"skia/include/config", // There's a WORKSPACE.bazel in here
			"skia/requirements.txt",
			"skia/toolchain",
			// TODO(kjlubick, lukasza) remove after rust's png crate is updated
			// and we don't need the patches anymore
			"skia/experimental/rust_png",
			// Actually needed to build the task drivers
			"skia/infra/bots/BUILD.bazel",
			"skia/infra/bots/build_task_drivers.sh",
			"skia/infra/bots/task_drivers",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_TEST, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython3",
			"skia/infra/bots/assets",
			"skia/infra/bots/run_recipe.py",
			"skia/platform_tools/ios/bin",
			"skia/resources",
			"skia/tools/valgrind.supp",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_WASM_GM, &specs.CasSpec{
		Root: "../skia", // Needed for other repos.
		Paths: []string{
			".vpython3",
			"resources",
			"tools/run-wasm-gm-tests",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_WHOLE_REPO, CAS_SPEC_WHOLE_REPO)
	b.MustAddCasSpec(CAS_RECREATE_SKPS, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython3",
			"skia/DEPS",
			"skia/bin/fetch-sk",
			"skia/infra/bots/assets/skp",
			"skia/infra/bots/utils.py",
			"skia/tools/skp",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	generateCompileCAS(b, cfg)

	// Validate and write out tasks.json.
	builder.MustFinish()
}
621
// getThisDirName returns the infra/bots directory which is an ancestor of this
// file, i.e. the grandparent directory of the file containing this function.
func getThisDirName() string {
	_, file, _, ok := runtime.Caller(0)
	if !ok {
		log.Fatal("Unable to find path to current file.")
	}
	// This file lives in infra/bots/gen_tasks_logic, so go up two levels.
	return filepath.Dir(filepath.Dir(file))
}
631
// getCallingDirName returns the infra/bots directory which is an ancestor of
// the calling gen_tasks.go file. WARNING: assumes that the calling gen_tasks.go
// file appears two steps up the stack; do not call from a function which is not
// directly called by gen_tasks.go.
func getCallingDirName() string {
	// Caller(2): frame 0 is this function, frame 1 is our direct caller
	// (e.g. GenTasks), frame 2 is the gen_tasks.go file we want.
	_, caller, _, ok := runtime.Caller(2)
	if !ok {
		log.Fatal("Unable to find path to calling file.")
	}
	return filepath.Dir(caller)
}
643
// builder is a wrapper for specs.TasksCfgBuilder which carries the extra state
// needed while generating tasks and jobs.
type builder struct {
	*specs.TasksCfgBuilder
	// cfg is the repo-specific configuration (see Config).
	cfg *Config
	// jobNameSchema is used to assemble/parse job names (see
	// deriveCompileTaskName).
	jobNameSchema *JobNameSchema
	// jobs is the full list of job names loaded from jobs.json.
	jobs []string
}
651
// marshalJson encodes the given data as JSON and fixes escaping of '<' which Go
// does by default: json.Marshal HTML-escapes '<' to \u003c, but consumers of
// this output expect the literal character. Exits the program if the data
// cannot be marshaled.
func marshalJson(data interface{}) string {
	j, err := json.Marshal(data)
	if err != nil {
		log.Fatal(err)
	}
	// strings.ReplaceAll is the idiomatic equivalent of strings.Replace(..., -1).
	return strings.ReplaceAll(string(j), "\\u003c", "<")
}
661
662// kitchenTaskNoBundle sets up the task to run a recipe via Kitchen, without the
663// recipe bundle.
664func (b *taskBuilder) kitchenTaskNoBundle(recipe string, outputDir string) {
665	b.usesLUCIAuth()
666	b.cipd(cipd.MustGetPackage("infra/tools/luci/kitchen/${platform}"))
667	b.env("RECIPES_USE_PY3", "true")
668	b.envPrefixes("VPYTHON_DEFAULT_SPEC", "skia/.vpython3")
669	b.usesPython()
670	b.recipeProp("swarm_out_dir", outputDir)
671	if outputDir != OUTPUT_NONE {
672		b.output(outputDir)
673	}
674	const python = "cipd_bin_packages/vpython3${EXECUTABLE_SUFFIX}"
675	b.cmd(python, "-u", "skia/infra/bots/run_recipe.py", "${ISOLATED_OUTDIR}", recipe, b.getRecipeProps(), b.cfg.Project)
676	// Most recipes want this isolate; they can override if necessary.
677	b.cas(CAS_RUN_RECIPE)
678	b.timeout(time.Hour)
679	b.Spec.ExtraTags = map[string]string{
680		"log_location": fmt.Sprintf("logdog://logs.chromium.org/%s/${SWARMING_TASK_ID}/+/annotations", b.cfg.Project),
681	}
682
683	// Attempts.
684	if !b.role("Build", "Upload") && b.extraConfig("ASAN", "HWASAN", "MSAN", "TSAN", "Valgrind") {
685		// Sanitizers often find non-deterministic issues that retries would hide.
686		b.attempts(1)
687	} else {
688		// Retry by default to hide random bot/hardware failures.
689		b.attempts(2)
690	}
691}
692
// kitchenTask sets up the task to run a recipe via Kitchen. Unlike
// kitchenTaskNoBundle, this also adds a dependency on the recipe-bundle task.
func (b *taskBuilder) kitchenTask(recipe string, outputDir string) {
	b.kitchenTaskNoBundle(recipe, outputDir)
	b.dep(b.bundleRecipes())
}
698
699// internalHardwareLabel returns the internal ID for the bot, if any.
700func (b *taskBuilder) internalHardwareLabel() *int {
701	if b.cfg.InternalHardwareLabel != nil {
702		return b.cfg.InternalHardwareLabel(b.parts)
703	}
704	return nil
705}
706
707// linuxGceDimensions adds the Swarming bot dimensions for Linux GCE instances.
708func (b *taskBuilder) linuxGceDimensions(machineType string) {
709	b.dimension(
710		// Specify CPU to avoid running builds on bots with a more unique CPU.
711		"cpu:x86-64-Haswell_GCE",
712		"gpu:none",
713		// Currently all Linux GCE tasks run on 16-CPU machines.
714		fmt.Sprintf("machine_type:%s", machineType),
715		fmt.Sprintf("os:%s", DEFAULT_OS_LINUX_GCE),
716		fmt.Sprintf("pool:%s", b.cfg.Pool),
717	)
718}
719
// codesizeTaskNameRegexp captures the "CodeSize-<binary name>-" prefix of a
// CodeSize task name; used by deriveCompileTaskName to map a CodeSize job to
// its Build task. Compiled once at package scope.
var codesizeTaskNameRegexp = regexp.MustCompile("^CodeSize-[a-zA-Z0-9_]+-")
722
// deriveCompileTaskName returns the name of a compile task based on the given
// job name. For Test/Perf jobs it rewrites the parts of the job name into an
// equivalent "Build-..." name; BuildStats and CodeSize jobs are mapped to
// their Build counterparts by string substitution; all other jobs compile for
// themselves and return their own name unchanged.
func (b *jobBuilder) deriveCompileTaskName() string {
	if b.role("Test", "Perf") {
		task_os := b.parts["os"]
		ec := []string{}
		if val := b.parts["extra_config"]; val != "" {
			ec = strings.Split(val, "_")
			// These extra_config parts affect only the test/perf run, not
			// the compile, so drop them from the derived Build task name.
			ignore := []string{
				"AbandonGpuContext", "PreAbandonGpuContext", "Valgrind",
				"FailFlushTimeCallbacks", "ReleaseAndAbandonGpuContext",
				"NativeFonts", "GDI", "NoGPUThreads", "DDL1", "DDL3",
				"DDLRecord", "BonusConfigs", "ColorSpaces", "GL",
				"SkottieTracing", "SkottieWASM", "GpuTess", "DMSAAStats", "Docker", "PDF",
				"Puppeteer", "SkottieFrames", "RenderSKP", "CanvasPerf", "AllPathsVolatile",
				"WebGL2", "i5", "OldestSupportedSkpVersion", "FakeWGPU", "TintIR", "Protected",
				"AndroidNDKFonts"}
			keep := make([]string, 0, len(ec))
			for _, part := range ec {
				if !In(part, ignore) {
					keep = append(keep, part)
				}
			}
			ec = keep
		}
		// Map the test/perf OS to the OS on which the compile actually runs.
		if b.matchOs("Android") {
			if !In("Android", ec) {
				ec = append([]string{"Android"}, ec...)
			}
			task_os = COMPILE_TASK_NAME_OS_LINUX
		} else if b.os("ChromeOS") {
			ec = append([]string{"Chromebook", "GLES"}, ec...)
			task_os = COMPILE_TASK_NAME_OS_LINUX
		} else if b.os("iOS") {
			ec = append([]string{task_os}, ec...)
			if b.parts["compiler"] == "Xcode11.4.1" {
				task_os = "Mac10.15.7"
			} else {
				task_os = "Mac"
			}
		} else if b.matchOs("Win") {
			task_os = "Win"
		} else if b.compiler("GCC") {
			// GCC compiles are now on a Docker container. We use the same OS and
			// version to compile as to test.
			ec = append(ec, "Docker")
		} else if b.matchOs("Debian11") {
			// We compile using the Debian11 machines in the skolo.
			task_os = "Debian11"
		} else if b.matchOs("Ubuntu", "Debian") {
			task_os = COMPILE_TASK_NAME_OS_LINUX
		} else if b.matchOs("Mac") {
			task_os = "Mac"
		}
		jobNameMap := map[string]string{
			"role":          "Build",
			"os":            task_os,
			"compiler":      b.parts["compiler"],
			"target_arch":   b.parts["arch"],
			"configuration": b.parts["configuration"],
		}
		if b.extraConfig("PathKit") {
			ec = []string{"PathKit"}
			// We prefer to compile this in the cloud because we have more resources there
			jobNameMap["os"] = "Debian10"
		}
		if b.extraConfig("CanvasKit", "SkottieWASM", "Puppeteer") {
			if b.cpu() {
				ec = []string{"CanvasKit_CPU"}
			} else {
				ec = []string{"CanvasKit"}
			}
			// We prefer to compile this in the cloud because we have more resources there
			jobNameMap["os"] = "Debian10"
		}
		if len(ec) > 0 {
			jobNameMap["extra_config"] = strings.Join(ec, "_")
		}
		name, err := b.jobNameSchema.MakeJobName(jobNameMap)
		if err != nil {
			log.Fatal(err)
		}
		return name
	} else if b.role("BuildStats") {
		return strings.Replace(b.Name, "BuildStats", "Build", 1)
	} else if b.role("CodeSize") {
		return codesizeTaskNameRegexp.ReplaceAllString(b.Name, "Build-")
	} else {
		return b.Name
	}
}
814
815// swarmDimensions generates swarming bot dimensions for the given task.
816func (b *taskBuilder) swarmDimensions() {
817	if b.cfg.SwarmDimensions != nil {
818		dims := b.cfg.SwarmDimensions(b.parts)
819		if dims != nil {
820			b.dimension(dims...)
821			return
822		}
823	}
824	b.defaultSwarmDimensions()
825}
826
// androidDeviceInfos maps Android models (as in the "model" part of a task) to
// the device_type and device_os Swarming dimensions, in that order.
var androidDeviceInfos = map[string][]string{
	"AndroidOne":      {"sprout", "MOB30Q"},
	"GalaxyS7_G930FD": {"herolte", "R16NW_G930FXXS2ERH6"}, // This is Oreo.
	"GalaxyS9":        {"starlte", "QP1A.190711.020"},     // This is Android10.
	"GalaxyS20":       {"exynos990", "QP1A.190711.020"},
	"GalaxyS24":       {"pineapple", "UP1A.231005.007"},
	"JioNext":         {"msm8937", "RKQ1.210602.002"},
	"Mokey":           {"mokey", "UDC_11161052"},
	"MokeyGo32":       {"mokey_go32", "UQ1A.240105.003.A1_11159138"},
	"Nexus5":          {"hammerhead", "M4B30Z_3437181"},
	"Nexus7":          {"grouper", "LMY47V_1836172"}, // 2012 Nexus 7
	"P30":             {"HWELE", "HUAWEIELE-L29"},
	"Pixel2XL":        {"taimen", "PPR1.180610.009"},
	"Pixel3":          {"blueline", "PQ1A.190105.004"},
	"Pixel3a":         {"sargo", "QP1A.190711.020"},
	"Pixel4":          {"flame", "RPB2.200611.009"},       // R Preview
	"Pixel4a":         {"sunfish", "AOSP.MASTER_7819821"}, // Pixel4a flashed with an Android HWASan build.
	"Pixel4XL":        {"coral", "QD1A.190821.011.C4"},
	"Pixel5":          {"redfin", "RD1A.200810.022.A4"},
	"Pixel6":          {"oriole", "SD1A.210817.037"},
	"Pixel7":          {"cheetah", "TD1A.221105.002"},
	"Pixel9":          {"tokay", "AD1A.240905.004"},
	"TecnoSpark3Pro":  {"TECNO-KB8", "PPR1.180610.011"},
	"Wembley":         {"wembley", "SP2A.220505.008"},
}
854
855// defaultSwarmDimensions generates default swarming bot dimensions for the given task.
856func (b *taskBuilder) defaultSwarmDimensions() {
857	d := map[string]string{
858		"pool": b.cfg.Pool,
859	}
860	if os, ok := b.parts["os"]; ok {
861		d["os"], ok = map[string]string{
862			"Android":    "Android",
863			"Android12":  "Android",
864			"ChromeOS":   "ChromeOS",
865			"Debian9":    DEFAULT_OS_LINUX_GCE, // Runs in Deb9 Docker.
866			"Debian10":   DEFAULT_OS_LINUX_GCE,
867			"Debian11":   DEBIAN_11_OS,
868			"Mac":        DEFAULT_OS_MAC,
869			"Mac10.15.1": "Mac-10.15.1",
870			"Mac10.15.7": "Mac-10.15.7",
871			"Mac11":      "Mac-11.4",
872			"Mac12":      "Mac-12",
873			"Mac13":      "Mac-13",
874			"Mokey":      "Android",
875			"MokeyGo32":  "Android",
876			"Ubuntu18":   "Ubuntu-18.04",
877			"Win":        DEFAULT_OS_WIN_GCE,
878			"Win10":      "Windows-10-19045",
879			"Win2019":    DEFAULT_OS_WIN_GCE,
880			"iOS":        "iOS-13.3.1",
881		}[os]
882		if !ok {
883			log.Fatalf("Entry %q not found in OS mapping.", os)
884		}
885		if os == "Debian11" && b.extraConfig("Docker") {
886			d["os"] = DEFAULT_OS_LINUX_GCE
887		}
888		if os == "Win10" && b.parts["model"] == "Golo" {
889			// ChOps-owned machines have Windows 10 22H2.
890			d["os"] = "Windows-10-19045"
891		}
892		if b.parts["model"] == "iPhone11" {
893			d["os"] = "iOS-13.6"
894		}
895		if b.parts["model"] == "iPadPro" {
896			d["os"] = "iOS-13.6"
897		}
898	} else {
899		d["os"] = DEFAULT_OS_DEBIAN
900	}
901	if b.role("Test", "Perf") {
902		if b.os("Android") {
903			// For Android, the device type is a better dimension
904			// than CPU or GPU.
905			deviceInfo, ok := androidDeviceInfos[b.parts["model"]]
906			if !ok {
907				log.Fatalf("Entry %q not found in Android mapping.", b.parts["model"])
908			}
909			d["device_type"] = deviceInfo[0]
910			d["device_os"] = deviceInfo[1]
911
912			// Tests using Android's HWAddress Sanitizer require an HWASan build of Android.
913			// See https://developer.android.com/ndk/guides/hwasan.
914			if b.extraConfig("HWASAN") {
915				d["android_hwasan_build"] = "1"
916			}
917		} else if b.os("Android12") {
918			// For Android, the device type is a better dimension
919			// than CPU or GPU.
920			deviceInfo, ok := map[string][]string{
921				"Pixel5": {"redfin", "SP2A.220305.012"},
922			}[b.parts["model"]]
923			if !ok {
924				log.Fatalf("Entry %q not found in Android mapping.", b.parts["model"])
925			}
926			d["device_type"] = deviceInfo[0]
927			d["device_os"] = deviceInfo[1]
928
929			// Tests using Android's HWAddress Sanitizer require an HWASan build of Android.
930			// See https://developer.android.com/ndk/guides/hwasan.
931			if b.extraConfig("HWASAN") {
932				d["android_hwasan_build"] = "1"
933			}
934		} else if b.os("iOS") {
935			device, ok := map[string]string{
936				"iPadMini4": "iPad5,1",
937				"iPhone7":   "iPhone9,1",
938				"iPhone8":   "iPhone10,1",
939				"iPhone11":  "iPhone12,1",
940				"iPadPro":   "iPad6,3",
941			}[b.parts["model"]]
942			if !ok {
943				log.Fatalf("Entry %q not found in iOS mapping.", b.parts["model"])
944			}
945			d["device_type"] = device
946		} else if b.cpu() || b.extraConfig("CanvasKit", "Docker", "SwiftShader") {
947			modelMapping, ok := map[string]map[string]string{
948				"AppleM1": {
949					"MacMini9.1": "arm64-64-Apple_M1",
950				},
951				"AppleIntel": {
952					"MacBookPro16.2": "x86-64",
953				},
954				"AVX": {
955					"VMware7.1": "x86-64",
956				},
957				"AVX2": {
958					"GCE":            "x86-64-Haswell_GCE",
959					"MacBookAir7.2":  "x86-64-i5-5350U",
960					"MacBookPro11.5": "x86-64-i7-4870HQ",
961					"MacMini7.1":     "x86-64-i5-4278U",
962					"NUC5i7RYH":      "x86-64-i7-5557U",
963					"NUC9i7QN":       "x86-64-i7-9750H",
964					"NUC11TZi5":      "x86-64-i5-1135G7",
965				},
966				"AVX512": {
967					"GCE":  "x86-64-Skylake_GCE",
968					"Golo": "Intel64_Family_6_Model_85_Stepping_7__GenuineIntel",
969				},
970				"Rome": {
971					"GCE": "x86-64-AMD_Rome_GCE",
972				},
973				"SwiftShader": {
974					"GCE": "x86-64-Haswell_GCE",
975				},
976			}[b.parts["cpu_or_gpu_value"]]
977			if !ok {
978				log.Fatalf("Entry %q not found in CPU mapping.", b.parts["cpu_or_gpu_value"])
979			}
980			cpu, ok := modelMapping[b.parts["model"]]
981			if !ok {
982				log.Fatalf("Entry %q not found in %q model mapping.", b.parts["model"], b.parts["cpu_or_gpu_value"])
983			}
984			d["cpu"] = cpu
985			if b.model("GCE") && b.matchOs("Debian") {
986				d["os"] = DEFAULT_OS_LINUX_GCE
987			}
988			if b.model("GCE") && d["cpu"] == "x86-64-Haswell_GCE" {
989				d["machine_type"] = MACHINE_TYPE_MEDIUM
990			}
991		} else {
992			// It's a GPU job.
993			if b.matchOs("Win") {
994				gpu, ok := map[string]string{
995					// At some point this might use the device ID, but for now it's like Chromebooks.
996					"GTX660":        "10de:11c0-26.21.14.4120",
997					"GTX960":        "10de:1401-32.0.15.6094",
998					"IntelHD4400":   "8086:0a16-20.19.15.4963",
999					"IntelIris540":  "8086:1926-31.0.101.2115",
1000					"IntelIris6100": "8086:162b-20.19.15.4963",
1001					"IntelIris655":  "8086:3ea5-26.20.100.7463",
1002					"IntelIrisXe":   "8086:9a49-32.0.101.5972",
1003					"RadeonHD7770":  "1002:683d-26.20.13031.18002",
1004					"RadeonR9M470X": "1002:6646-26.20.13031.18002",
1005					"QuadroP400":    "10de:1cb3-31.0.15.5222",
1006					"RadeonVega6":   "1002:1636-31.0.14057.5006",
1007					"RTX3060":       "10de:2489-32.0.15.6094",
1008				}[b.parts["cpu_or_gpu_value"]]
1009				if !ok {
1010					log.Fatalf("Entry %q not found in Win GPU mapping.", b.parts["cpu_or_gpu_value"])
1011				}
1012				d["gpu"] = gpu
1013			} else if b.isLinux() {
1014				gpu, ok := map[string]string{
1015					// Intel drivers come from CIPD, so no need to specify the version here.
1016					"IntelHD2000":  "8086:0102",
1017					"IntelHD405":   "8086:22b1",
1018					"IntelIris640": "8086:5926",
1019					"QuadroP400":   "10de:1cb3-510.60.02",
1020					"RTX3060":      "10de:2489-470.182.03",
1021					"IntelIrisXe":  "8086:9a49",
1022					"RadeonVega6":  "1002:1636",
1023				}[b.parts["cpu_or_gpu_value"]]
1024				if !ok {
1025					log.Fatalf("Entry %q not found in Ubuntu GPU mapping.", b.parts["cpu_or_gpu_value"])
1026				}
1027				d["gpu"] = gpu
1028
1029				if b.matchOs("Debian11") {
1030					d["os"] = DEBIAN_11_OS
1031				} else if b.matchOs("Debian") {
1032					// The Debian10 machines in the skolo are 10.10, not 10.3.
1033					d["os"] = DEFAULT_OS_DEBIAN
1034				}
1035				if b.parts["cpu_or_gpu_value"] == "IntelIrisXe" {
1036					// The Intel Iris Xe devices are Debian 11.3.
1037					d["os"] = "Debian-bookworm/sid"
1038				}
1039			} else if b.matchOs("Mac") {
1040				gpu, ok := map[string]string{
1041					"AppleM1":       "AppleM1",
1042					"IntelHD6000":   "8086:1626",
1043					"IntelHD615":    "8086:591e",
1044					"IntelIris5100": "8086:0a2e",
1045					"IntelIrisPlus": "8086:8a53",
1046					"RadeonHD8870M": "1002:6821-4.0.20-3.2.8",
1047				}[b.parts["cpu_or_gpu_value"]]
1048				if !ok {
1049					log.Fatalf("Entry %q not found in Mac GPU mapping.", b.parts["cpu_or_gpu_value"])
1050				}
1051				if gpu == "AppleM1" {
1052					// No GPU dimension yet, but we can constrain by CPU.
1053					d["cpu"] = "arm64-64-Apple_M1"
1054				} else {
1055					d["gpu"] = gpu
1056				}
1057				// We have two different types of MacMini7,1 with the same GPU but different CPUs.
1058				if b.gpu("IntelIris5100") {
1059					if b.extraConfig("i5") {
1060						// If we say "i5", run on our MacMini7,1s in the Skolo:
1061						d["cpu"] = "x86-64-i5-4278U"
1062					} else {
1063						// Otherwise, run on Golo machines, just because that's
1064						// where those jobs have always run. Plus, some of them
1065						// are Perf jobs, which we want to keep consistent.
1066						d["cpu"] = "x86-64-i7-4578U"
1067					}
1068				}
1069			} else if b.os("ChromeOS") {
1070				version, ok := map[string]string{
1071					"IntelUHDGraphics605": "15236.2.0",
1072					"RadeonVega3":         "14233.0.0",
1073					"Adreno618":           "14150.39.0",
1074					"MaliT860":            "14092.77.0",
1075				}[b.parts["cpu_or_gpu_value"]]
1076				if !ok {
1077					log.Fatalf("Entry %q not found in ChromeOS GPU mapping.", b.parts["cpu_or_gpu_value"])
1078				}
1079				d["gpu"] = b.parts["cpu_or_gpu_value"]
1080				d["release_version"] = version
1081			} else {
1082				log.Fatalf("Unknown GPU mapping for OS %q.", b.parts["os"])
1083			}
1084		}
1085	} else {
1086		if d["os"] == DEBIAN_11_OS {
1087			// The Debian11 compile machines in the skolo have
1088			// GPUs, but we still use them for compiles also.
1089
1090			// Dodge Raspberry Pis.
1091			d["cpu"] = "x86-64"
1092			// Target the AMDRyzen 5 4500U machines, as they are beefy and we have
1093			// 19 of them, and they are setup to compile.
1094			d["gpu"] = "1002:1636"
1095		} else {
1096			d["gpu"] = "none"
1097		}
1098		if d["os"] == DEFAULT_OS_LINUX_GCE {
1099			if b.extraConfig("CanvasKit", "CMake", "Docker", "PathKit") || b.role("BuildStats", "CodeSize") {
1100				b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1101				return
1102			}
1103			// Use many-core machines for Build tasks.
1104			b.linuxGceDimensions(MACHINE_TYPE_LARGE)
1105			return
1106		} else if d["os"] == DEFAULT_OS_WIN_GCE {
1107			// Windows CPU bots.
1108			d["cpu"] = "x86-64-Haswell_GCE"
1109			// Use many-core machines for Build tasks.
1110			d["machine_type"] = MACHINE_TYPE_LARGE
1111		} else if d["os"] == DEFAULT_OS_MAC || d["os"] == "Mac-10.15.7" {
1112			// Mac CPU bots are no longer VMs.
1113			d["cpu"] = "x86-64"
1114			d["cores"] = "12"
1115			delete(d, "gpu")
1116		}
1117	}
1118
1119	dims := make([]string, 0, len(d))
1120	for k, v := range d {
1121		dims = append(dims, fmt.Sprintf("%s:%s", k, v))
1122	}
1123	sort.Strings(dims)
1124	b.dimension(dims...)
1125}
1126
1127// bundleRecipes generates the task to bundle and isolate the recipes. Returns
1128// the name of the task, which may be added as a dependency.
1129func (b *jobBuilder) bundleRecipes() string {
1130	b.addTask(BUNDLE_RECIPES_NAME, func(b *taskBuilder) {
1131		b.usesGit()
1132		b.cmd("/bin/bash", "skia/infra/bots/bundle_recipes.sh", specs.PLACEHOLDER_ISOLATED_OUTDIR)
1133		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1134		b.idempotent()
1135		b.cas(CAS_RECIPES)
1136		b.usesPython()
1137	})
1138	return BUNDLE_RECIPES_NAME
1139}
1140
1141// buildTaskDrivers generates the task to compile the task driver code to run on
1142// all platforms. Returns the name of the task, which may be added as a
1143// dependency.
1144func (b *jobBuilder) buildTaskDrivers(goos, goarch string) string {
1145	name := BUILD_TASK_DRIVERS_PREFIX + "_" + goos + "_" + goarch
1146	b.addTask(name, func(b *taskBuilder) {
1147		b.cmd("/bin/bash", "skia/infra/bots/build_task_drivers.sh",
1148			specs.PLACEHOLDER_ISOLATED_OUTDIR,
1149			goos+"_"+goarch)
1150		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1151		b.usesBazel("linux_x64")
1152		b.idempotent()
1153		b.cas(CAS_TASK_DRIVERS)
1154	})
1155	return name
1156}
1157
1158// createDockerImage creates the specified docker image. Returns the name of the
1159// generated task.
1160func (b *jobBuilder) createDockerImage(wasm bool) string {
1161	// First, derive the name of the task.
1162	imageName := "skia-release"
1163	taskName := "Housekeeper-PerCommit-CreateDockerImage_Skia_Release"
1164	if wasm {
1165		imageName = "skia-wasm-release"
1166		taskName = "Housekeeper-PerCommit-CreateDockerImage_Skia_WASM_Release"
1167	}
1168	imageDir := path.Join("docker", imageName)
1169
1170	// Add the task.
1171	b.addTask(taskName, func(b *taskBuilder) {
1172		// TODO(borenet): Make this task not use Git.
1173		b.usesGit()
1174		b.cmd(
1175			b.taskDriver("build_push_docker_image", false),
1176			"--image_name", fmt.Sprintf("gcr.io/skia-public/%s", imageName),
1177			"--dockerfile_dir", imageDir,
1178			"--project_id", "skia-swarming-bots",
1179			"--task_id", specs.PLACEHOLDER_TASK_ID,
1180			"--task_name", b.Name,
1181			"--workdir", ".",
1182			"--gerrit_project", "skia",
1183			"--gerrit_url", "https://skia-review.googlesource.com",
1184			"--repo", specs.PLACEHOLDER_REPO,
1185			"--revision", specs.PLACEHOLDER_REVISION,
1186			"--patch_issue", specs.PLACEHOLDER_ISSUE,
1187			"--patch_set", specs.PLACEHOLDER_PATCHSET,
1188			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
1189			"--swarm_out_dir", specs.PLACEHOLDER_ISOLATED_OUTDIR,
1190		)
1191		b.cas(CAS_EMPTY)
1192		b.serviceAccount(b.cfg.ServiceAccountCompile)
1193		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1194		b.usesDocker()
1195		b.cache(CACHES_DOCKER...)
1196		b.timeout(time.Hour)
1197	})
1198	return taskName
1199}
1200
1201// createPushAppsFromSkiaDockerImage creates and pushes docker images of some apps
1202// (eg: fiddler, api) using the skia-release docker image.
1203func (b *jobBuilder) createPushAppsFromSkiaDockerImage() {
1204	b.addTask(b.Name, func(b *taskBuilder) {
1205		// TODO(borenet): Make this task not use Git.
1206		b.usesGit()
1207		b.cmd(
1208			b.taskDriver("push_apps_from_skia_image", false),
1209			"--project_id", "skia-swarming-bots",
1210			"--task_id", specs.PLACEHOLDER_TASK_ID,
1211			"--task_name", b.Name,
1212			"--workdir", ".",
1213			"--repo", specs.PLACEHOLDER_REPO,
1214			"--revision", specs.PLACEHOLDER_REVISION,
1215			"--patch_issue", specs.PLACEHOLDER_ISSUE,
1216			"--patch_set", specs.PLACEHOLDER_PATCHSET,
1217			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
1218			"--bazel_cache_dir", bazelCacheDirOnGCELinux,
1219		)
1220		b.dep(b.createDockerImage(false))
1221		b.cas(CAS_EMPTY)
1222		b.usesBazel("linux_x64")
1223		b.serviceAccount(b.cfg.ServiceAccountCompile)
1224		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1225		b.usesDocker()
1226		b.cache(CACHES_DOCKER...)
1227		b.timeout(2 * time.Hour)
1228	})
1229}
1230
1231var iosRegex = regexp.MustCompile(`os:iOS-(.*)`)
1232
1233func (b *taskBuilder) maybeAddIosDevImage() {
1234	for _, dim := range b.Spec.Dimensions {
1235		if m := iosRegex.FindStringSubmatch(dim); len(m) >= 2 {
1236			var asset string
1237			switch m[1] {
1238			// Other patch versions can be added to the same case.
1239			case "11.4.1":
1240				asset = "ios-dev-image-11.4"
1241			case "13.3.1":
1242				asset = "ios-dev-image-13.3"
1243			case "13.4.1":
1244				asset = "ios-dev-image-13.4"
1245			case "13.5.1":
1246				asset = "ios-dev-image-13.5"
1247			case "13.6":
1248				asset = "ios-dev-image-13.6"
1249			default:
1250				log.Fatalf("Unable to determine correct ios-dev-image asset for %s. If %s is a new iOS release, you must add a CIPD package containing the corresponding iOS dev image; see ios-dev-image-11.4 for an example.", b.Name, m[1])
1251			}
1252			b.asset(asset)
1253			break
1254		} else if strings.Contains(dim, "iOS") {
1255			log.Fatalf("Must specify iOS version for %s to obtain correct dev image; os dimension is missing version: %s", b.Name, dim)
1256		}
1257	}
1258}
1259
// compile generates a compile task. Returns the name of the compile task.
func (b *jobBuilder) compile() string {
	name := b.deriveCompileTaskName()
	if b.extraConfig("WasmGMTests") {
		// WASM GM tests have their own dedicated compile task type.
		b.compileWasmGMTests(name)
	} else {
		b.addTask(name, func(b *taskBuilder) {
			recipe := "compile"
			casSpec := CAS_COMPILE
			// These configurations sync their own code (and possibly DEPS),
			// so they use the sync_and_compile recipe with the smaller
			// run-recipe CAS rather than the full compile CAS.
			if b.extraConfig("NoDEPS", "CMake", "Flutter", "NoPatch") || b.shellsOutToBazel() {
				recipe = "sync_and_compile"
				casSpec = CAS_RUN_RECIPE
				b.recipeProps(EXTRA_PROPS)
				b.usesGit()
				if !b.extraConfig("NoDEPS") {
					b.cache(CACHES_WORKDIR...)
				}
			} else {
				// Plain compiles are marked idempotent so Swarming may dedupe
				// identical runs.
				b.idempotent()
			}
			// NoPatch builds write their output to a separate directory.
			if b.extraConfig("NoPatch") {
				b.kitchenTask(recipe, OUTPUT_BUILD_NOPATCH)
			} else {
				b.kitchenTask(recipe, OUTPUT_BUILD)
			}
			b.cas(casSpec)
			b.serviceAccount(b.cfg.ServiceAccountCompile)
			b.swarmDimensions()
			if b.extraConfig("Docker", "LottieWeb", "CMake") || b.compiler("EMCC") {
				b.usesDocker()
				b.cache(CACHES_DOCKER...)
			}
			if b.extraConfig("Dawn") {
				// Dawn's version generator invokes git; see:
				// https://dawn.googlesource.com/dawn/+/516701da8184655a47c92a573cc84da7db5e69d4/generator/dawn_version_generator.py#21
				b.usesGit()
			}

			// Android bots require a toolchain.
			if b.extraConfig("Android") {
				if b.matchOs("Mac") {
					b.asset("android_ndk_darwin")
				} else if b.matchOs("Win") {
					pkg := b.MustGetCipdPackageFromAsset("android_ndk_windows")
					// The NDK is installed at the short path "n" on Windows.
					// NOTE(review): presumably to dodge Windows path-length
					// limits -- confirm.
					pkg.Path = "n"
					b.cipd(pkg)
				} else {
					b.asset("android_ndk_linux")
				}
			} else if b.extraConfig("Chromebook") {
				b.asset("clang_linux")
				// The GLES sysroot asset depends on the target architecture.
				if b.arch("x86_64") {
					b.asset("chromebook_x86_64_gles")
				} else if b.arch("arm") {
					b.asset("armhf_sysroot")
					b.asset("chromebook_arm_gles")
				}
			} else if b.isLinux() {
				if b.compiler("Clang") {
					b.asset("clang_linux")
				}
				if b.extraConfig("SwiftShader") {
					b.asset("cmake_linux")
				}
				b.asset("ccache_linux")
				b.usesCCache()
				if b.shellsOutToBazel() {
					b.usesBazel("linux_x64")
					// Bazel-based compiles are not retried.
					b.attempts(1)
				}
			} else if b.matchOs("Win") {
				b.asset("win_toolchain")
				if b.compiler("Clang") {
					b.asset("clang_win")
				}
				if b.extraConfig("DWriteCore") {
					b.asset("dwritecore")
				}
			} else if b.matchOs("Mac") {
				b.cipd(CIPD_PKGS_XCODE...)
				// Cache the installed Xcode between runs.
				b.Spec.Caches = append(b.Spec.Caches, &specs.Cache{
					Name: "xcode",
					Path: "cache/Xcode.app",
				})
				b.asset("ccache_mac")
				b.usesCCache()
				if b.extraConfig("iOS") {
					b.asset("provisioning_profile_ios")
				}
				if b.shellsOutToBazel() {
					// All of our current Mac compile machines are x64 Mac only.
					b.usesBazel("mac_x64")
					b.attempts(1)
				}
			}
		})
	}

	// All compile tasks are runnable as their own Job. Assert that the Job
	// is listed in jobs.
	if !In(name, b.jobs) {
		log.Fatalf("Job %q is missing from the jobs list! Derived from: %q", name, b.Name)
	}

	return name
}
1365
1366// recreateSKPs generates a RecreateSKPs task.
1367func (b *jobBuilder) recreateSKPs() {
1368	b.addTask(b.Name, func(b *taskBuilder) {
1369		cmd := []string{
1370			b.taskDriver("recreate_skps", false),
1371			"--local=false",
1372			"--project_id", "skia-swarming-bots",
1373			"--task_id", specs.PLACEHOLDER_TASK_ID,
1374			"--task_name", b.Name,
1375			"--skia_revision", specs.PLACEHOLDER_REVISION,
1376			"--patch_ref", specs.PLACEHOLDER_PATCH_REF,
1377			"--git_cache", "cache/git",
1378			"--checkout_root", "cache/work",
1379			"--dm_path", "build/dm",
1380		}
1381		if b.matchExtraConfig("DryRun") {
1382			cmd = append(cmd, "--dry_run")
1383		}
1384
1385		b.cas(CAS_RECREATE_SKPS)
1386		b.dep("Build-Debian10-Clang-x86_64-Release") // To get DM.
1387		b.cmd(cmd...)
1388		b.usesLUCIAuth()
1389		b.serviceAccount(b.cfg.ServiceAccountRecreateSKPs)
1390		b.dimension(
1391			"pool:SkiaCT",
1392			fmt.Sprintf("os:%s", DEFAULT_OS_LINUX_GCE),
1393		)
1394		b.usesGo()
1395		b.cache(CACHES_WORKDIR...)
1396		b.timeout(6 * time.Hour)
1397		b.usesPython()
1398		b.attempts(2)
1399	})
1400}
1401
1402// checkGeneratedFiles verifies that no generated SKSL files have been edited by hand, and that
1403// we do not get any diffs after regenerating all files (go generate, Gazelle, etc.).
1404func (b *jobBuilder) checkGeneratedFiles() {
1405	b.addTask(b.Name, func(b *taskBuilder) {
1406		b.cas(CAS_BAZEL)
1407		b.cmd(
1408			b.taskDriver("check_generated_files", false),
1409			"--local=false",
1410			"--git_path=cipd_bin_packages/git",
1411			"--project_id", "skia-swarming-bots",
1412			"--task_id", specs.PLACEHOLDER_TASK_ID,
1413			"--task_name", b.Name,
1414			"--bazel_cache_dir", bazelCacheDirOnGCELinux,
1415			"--bazel_arg=--config=for_linux_x64_with_rbe",
1416			"--bazel_arg=--jobs=100",
1417		)
1418		b.usesBazel("linux_x64")
1419		b.usesGit()
1420		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1421		b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
1422	})
1423}
1424
1425// goLinters runs various Go linters (gofmt, errcheck, etc.) and fails if there are any errors or
1426// diffs.
1427func (b *jobBuilder) goLinters() {
1428	b.addTask(b.Name, func(b *taskBuilder) {
1429		b.cas(CAS_BAZEL)
1430		b.cmd(
1431			b.taskDriver("go_linters", false),
1432			"--local=false",
1433			"--git_path=cipd_bin_packages/git",
1434			"--project_id", "skia-swarming-bots",
1435			"--task_id", specs.PLACEHOLDER_TASK_ID,
1436			"--task_name", b.Name,
1437			"--bazel_cache_dir", bazelCacheDirOnGCELinux,
1438			"--bazel_arg=--config=for_linux_x64_with_rbe",
1439			"--bazel_arg=--jobs=100",
1440		)
1441		b.usesBazel("linux_x64")
1442		b.usesGit()
1443		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1444		b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
1445	})
1446}
1447
1448// checkGnToBp verifies that the gn_to_bp.py script continues to work.
1449func (b *jobBuilder) checkGnToBp() {
1450	b.addTask(b.Name, func(b *taskBuilder) {
1451		b.cas(CAS_COMPILE)
1452		b.cmd(
1453			b.taskDriver("run_gn_to_bp", false),
1454			"--local=false",
1455			"--project_id", "skia-swarming-bots",
1456			"--task_id", specs.PLACEHOLDER_TASK_ID,
1457			"--task_name", b.Name,
1458		)
1459		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1460		b.usesPython()
1461		b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
1462	})
1463}
1464
1465// housekeeper generates a Housekeeper task.
1466func (b *jobBuilder) housekeeper() {
1467	b.addTask(b.Name, func(b *taskBuilder) {
1468		b.recipeProps(EXTRA_PROPS)
1469		b.kitchenTask("housekeeper", OUTPUT_NONE)
1470		b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
1471		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1472		b.usesGit()
1473		b.cache(CACHES_WORKDIR...)
1474	})
1475}
1476
1477// g3FrameworkCanary generates a G3 Framework Canary task. Returns
1478// the name of the last task in the generated chain of tasks, which the Job
1479// should add as a dependency.
1480func (b *jobBuilder) g3FrameworkCanary() {
1481	b.addTask(b.Name, func(b *taskBuilder) {
1482		b.cas(CAS_EMPTY)
1483		b.cmd(
1484			b.taskDriver("g3_canary", false),
1485			"--local=false",
1486			"--project_id", "skia-swarming-bots",
1487			"--task_id", specs.PLACEHOLDER_TASK_ID,
1488			"--task_name", b.Name,
1489			"--repo", specs.PLACEHOLDER_REPO,
1490			"--revision", specs.PLACEHOLDER_REVISION,
1491			"--patch_issue", specs.PLACEHOLDER_ISSUE,
1492			"--patch_set", specs.PLACEHOLDER_PATCHSET,
1493			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
1494		)
1495		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1496		b.usesLUCIAuth()
1497		b.serviceAccount("skia-g3-framework-compile@skia-swarming-bots.iam.gserviceaccount.com")
1498		b.timeout(3 * time.Hour)
1499		b.attempts(1)
1500	})
1501}
1502
1503// infra generates an infra_tests task.
1504func (b *jobBuilder) infra() {
1505	b.addTask(b.Name, func(b *taskBuilder) {
1506		if b.matchOs("Win") || b.matchExtraConfig("Win") {
1507			b.dimension(
1508				// Specify CPU to avoid running builds on bots with a more unique CPU.
1509				"cpu:x86-64-Haswell_GCE",
1510				"gpu:none",
1511				fmt.Sprintf("machine_type:%s", MACHINE_TYPE_MEDIUM), // We don't have any small Windows instances.
1512				fmt.Sprintf("os:%s", DEFAULT_OS_WIN_GCE),
1513				fmt.Sprintf("pool:%s", b.cfg.Pool),
1514			)
1515		} else {
1516			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1517		}
1518		b.recipeProp("repository", specs.PLACEHOLDER_REPO)
1519		b.kitchenTask("infra", OUTPUT_NONE)
1520		b.cas(CAS_WHOLE_REPO)
1521		b.serviceAccount(b.cfg.ServiceAccountCompile)
1522		b.usesGSUtil()
1523		b.idempotent()
1524		b.usesGo()
1525	})
1526}
1527
1528// buildstats generates a builtstats task, which compiles code and generates
1529// statistics about the build.
1530func (b *jobBuilder) buildstats() {
1531	compileTaskName := b.compile()
1532	b.addTask(b.Name, func(b *taskBuilder) {
1533		b.recipeProps(EXTRA_PROPS)
1534		b.kitchenTask("compute_buildstats", OUTPUT_PERF)
1535		b.dep(compileTaskName)
1536		b.asset("bloaty")
1537		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1538		b.usesDocker()
1539		b.usesGit()
1540		b.cache(CACHES_WORKDIR...)
1541	})
1542	// Upload release results (for tracking in perf)
1543	// We have some jobs that are FYI (e.g. Debug-CanvasKit, tree-map generator)
1544	if b.release() && !b.arch("x86_64") {
1545		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
1546		depName := b.Name
1547		b.addTask(uploadName, func(b *taskBuilder) {
1548			b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
1549			b.recipeProps(EXTRA_PROPS)
1550			// TODO(borenet): I'm not sure why the upload task is
1551			// using the BuildStats task name, but I've done this
1552			// to maintain existing behavior.
1553			b.Name = depName
1554			b.kitchenTask("upload_buildstats_results", OUTPUT_NONE)
1555			b.Name = uploadName
1556			b.serviceAccount(b.cfg.ServiceAccountUploadNano)
1557			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1558			b.usesGSUtil()
1559			b.dep(depName)
1560		})
1561	}
1562}
1563
1564// codesize generates a codesize task, which takes binary produced by a
1565// compile task, runs Bloaty against it, and uploads the resulting code size
1566// statistics to the GCS bucket belonging to the codesize.skia.org service.
1567func (b *jobBuilder) codesize() {
1568	compileTaskName := b.compile()
1569	compileTaskNameNoPatch := compileTaskName
1570	if b.extraConfig("Android") {
1571		compileTaskNameNoPatch += "_NoPatch" // add a second "extra config"
1572	} else {
1573		compileTaskNameNoPatch += "-NoPatch" // add the only "extra config"
1574	}
1575
1576	bloatyCipdPkg := b.MustGetCipdPackageFromAsset("bloaty")
1577
1578	b.addTask(b.Name, func(b *taskBuilder) {
1579		b.cas(CAS_EMPTY)
1580		b.dep(compileTaskName)
1581		b.dep(compileTaskNameNoPatch)
1582		cmd := []string{
1583			b.taskDriver("codesize", false),
1584			"--local=false",
1585			"--project_id", "skia-swarming-bots",
1586			"--task_id", specs.PLACEHOLDER_TASK_ID,
1587			"--task_name", b.Name,
1588			"--compile_task_name", compileTaskName,
1589			"--compile_task_name_no_patch", compileTaskNameNoPatch,
1590			// Note: the binary name cannot contain dashes, otherwise the naming
1591			// schema logic will partition it into multiple parts.
1592			//
1593			// If we ever need to define a CodeSize-* task for a binary with
1594			// dashes in its name (e.g. "my-binary"), a potential workaround is to
1595			// create a mapping from a new, non-dashed binary name (e.g. "my_binary")
1596			// to the actual binary name with dashes. This mapping can be hardcoded
1597			// in this function; no changes to the task driver would be necessary.
1598			"--binary_name", b.parts["binary_name"],
1599			"--bloaty_cipd_version", bloatyCipdPkg.Version,
1600			"--bloaty_binary", "bloaty/bloaty",
1601
1602			"--repo", specs.PLACEHOLDER_REPO,
1603			"--revision", specs.PLACEHOLDER_REVISION,
1604			"--patch_issue", specs.PLACEHOLDER_ISSUE,
1605			"--patch_set", specs.PLACEHOLDER_PATCHSET,
1606			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
1607		}
1608		if strings.Contains(compileTaskName, "Android") {
1609			b.asset("android_ndk_linux")
1610			cmd = append(cmd, "--strip_binary",
1611				"android_ndk_linux/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-strip")
1612		} else {
1613			b.asset("binutils_linux_x64")
1614			cmd = append(cmd, "--strip_binary", "binutils_linux_x64/strip")
1615		}
1616		b.cmd(cmd...)
1617		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1618		b.cache(CACHES_WORKDIR...)
1619		b.usesLUCIAuth()
1620		b.asset("bloaty")
1621		b.serviceAccount("skia-external-codesize@skia-swarming-bots.iam.gserviceaccount.com")
1622		b.timeout(20 * time.Minute)
1623		b.attempts(1)
1624	})
1625}
1626
1627// doUpload indicates whether the given Job should upload its results.
1628func (b *jobBuilder) doUpload() bool {
1629	for _, s := range b.cfg.NoUpload {
1630		m, err := regexp.MatchString(s, b.Name)
1631		if err != nil {
1632			log.Fatal(err)
1633		}
1634		if m {
1635			return false
1636		}
1637	}
1638	return true
1639}
1640
// commonTestPerfAssets adds the assets needed by Test and Perf tasks.
func (b *taskBuilder) commonTestPerfAssets() {
	// Docker-based tests don't need the standard CIPD assets
	if b.extraConfig("CanvasKit", "PathKit") || (b.role("Test") && b.extraConfig("LottieWeb")) {
		return
	}
	if b.os("Android", "ChromeOS", "iOS") {
		b.asset("skp", "svg", "skimage")
	} else if b.extraConfig("OldestSupportedSkpVersion") {
		// Pin the SKP asset to the oldest supported version instead of ToT.
		b.assetWithVersion("skp", oldestSupportedSkpVersion)
	} else {
		// for desktop machines
		b.asset("skimage", "skp", "svg")
	}

	// Linux sanitizer (matching "SAN" in extra config) runs need clang_linux.
	if b.isLinux() && b.matchExtraConfig("SAN") {
		b.asset("clang_linux")
	}

	if b.isLinux() {
		if b.extraConfig("Vulkan") {
			b.asset("linux_vulkan_sdk")
		}
		// Intel GPUs on Linux use a Mesa driver shipped via CIPD; the asset
		// differs depending on whether the GPU is still supported in Mesa v22.
		if b.matchGpu("Intel") {
			if b.matchGpu("IrisXe") {
				b.asset("mesa_intel_driver_linux_22")
			} else {
				// Use this for legacy drivers that were culled in v22 of Mesa.
				// https://www.phoronix.com/scan.php?page=news_item&px=Mesa-22.0-Drops-OpenSWR
				b.asset("mesa_intel_driver_linux")
			}
		}
	}

	if b.matchOs("Win") && b.extraConfig("DWriteCore") {
		b.asset("dwritecore")
	}
}
1679
// directUpload adds prerequisites for uploading to GCS: gsBucket is recorded
// as a recipe property, serviceAccount is the account used for the upload,
// and gsutil is made available on the task.
func (b *taskBuilder) directUpload(gsBucket, serviceAccount string) {
	b.recipeProp("gs_bucket", gsBucket)
	b.serviceAccount(serviceAccount)
	b.usesGSUtil()
}
1686
// dm generates a Test task using dm.
func (b *jobBuilder) dm() {
	compileTaskName := ""
	// LottieWeb doesn't require anything in Skia to be compiled.
	if !b.extraConfig("LottieWeb") {
		compileTaskName = b.compile()
	}
	// Set to true below when the task uploads its own results, in which case
	// the separate Upload-* task at the bottom is skipped.
	directUpload := false
	b.addTask(b.Name, func(b *taskBuilder) {
		// Select the CAS spec and recipe based on the extra config.
		cas := CAS_TEST
		recipe := "test"
		if b.extraConfig("PathKit") {
			cas = CAS_PATHKIT
			recipe = "test_pathkit"
			if b.doUpload() {
				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
				directUpload = true
			}
		} else if b.extraConfig("CanvasKit") {
			cas = CAS_CANVASKIT
			recipe = "test_canvaskit"
			if b.doUpload() {
				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
				directUpload = true
			}
		} else if b.extraConfig("LottieWeb") {
			// CAS_LOTTIE_CI differs from CAS_LOTTIE_WEB in that it includes
			// more of the files, especially those brought in via DEPS in the
			// lottie-ci repo. The main difference between Perf.+LottieWeb and
			// Test.+LottieWeb is that the former pulls in the lottie build via
			// npm and the latter always tests at lottie's
			// ToT.
			cas = CAS_LOTTIE_CI
			recipe = "test_lottie_web"
			if b.doUpload() {
				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
				directUpload = true
			}
		} else {
			// Default recipe supports direct upload.
			// TODO(http://skbug.com/11785): Windows jobs are unable to extract gsutil.
			// https://bugs.chromium.org/p/chromium/issues/detail?id=1192611
			if b.doUpload() && !b.matchOs("Win") {
				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
				directUpload = true
			}
		}
		b.recipeProp("gold_hashes_url", b.cfg.GoldHashesURL)
		b.recipeProps(EXTRA_PROPS)
		// Pass the internal hardware label (if any) through to dm's flags.
		iid := b.internalHardwareLabel()
		iidStr := ""
		if iid != nil {
			iidStr = strconv.Itoa(*iid)
		}
		if recipe == "test" {
			b.dmFlags(iidStr)
		}
		b.kitchenTask(recipe, OUTPUT_TEST)
		b.cas(cas)
		b.swarmDimensions()
		if b.extraConfig("CanvasKit", "Docker", "LottieWeb", "PathKit") {
			b.usesDocker()
		}
		if compileTaskName != "" {
			b.dep(compileTaskName)
		}
		if b.matchOs("Android") && b.extraConfig("ASAN") {
			b.asset("android_ndk_linux")
		}
		if b.extraConfig("NativeFonts") && !b.matchOs("Android") {
			b.needsFontsForParagraphTests()
		}
		// Fontations configs pull in the googlefonts test data CIPD package.
		if b.extraConfig("Fontations") {
			b.cipd(&specs.CipdPackage{
				Name:    "chromium/third_party/googlefonts_testdata",
				Path:    "googlefonts_testdata",
				Version: "version:20230913",
			})
		}
		b.commonTestPerfAssets()
		if b.matchExtraConfig("Lottie") {
			b.asset("lottie-samples")
		}
		b.expiration(20 * time.Hour)

		// Adjust timeout/expiration for especially slow configurations.
		b.timeout(4 * time.Hour)
		if b.extraConfig("Valgrind") {
			b.timeout(9 * time.Hour)
			b.expiration(48 * time.Hour)
			b.asset("valgrind")
			// Since Valgrind runs on the same bots as the CQ, we restrict Valgrind to a subset of the bots
			// to ensure there are always bots free for CQ tasks.
			b.dimension("valgrind:1")
		} else if b.extraConfig("MSAN") {
			b.timeout(9 * time.Hour)
		} else if b.arch("x86") && b.debug() {
			// skia:6737
			b.timeout(6 * time.Hour)
		} else if b.matchOs("Mac11") {
			b.timeout(30 * time.Minute)
		}
		b.maybeAddIosDevImage()
	})

	// Upload results if necessary. TODO(kjlubick): If we do coverage analysis at the same
	// time as normal tests (which would be nice), cfg.json needs to have Coverage removed.
	if b.doUpload() && !directUpload {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
		depName := b.Name
		b.addTask(uploadName, func(b *taskBuilder) {
			b.recipeProp("gs_bucket", b.cfg.GsBucketGm)
			b.recipeProps(EXTRA_PROPS)
			b.kitchenTask("upload_dm_results", OUTPUT_NONE)
			b.serviceAccount(b.cfg.ServiceAccountUploadGM)
			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
			b.usesGSUtil()
			b.dep(depName)
		})
	}
}
1807
1808// canary generates a task that uses TaskDrivers to trigger canary manual rolls on autorollers.
1809// Canary-G3 does not use this path because it is very different from other autorollers.
1810func (b *jobBuilder) canary(rollerName, canaryCQKeyword, targetProjectBaseURL string) {
1811	b.addTask(b.Name, func(b *taskBuilder) {
1812		b.cas(CAS_EMPTY)
1813		b.cmd(
1814			b.taskDriver("canary", false),
1815			"--local=false",
1816			"--project_id", "skia-swarming-bots",
1817			"--task_id", specs.PLACEHOLDER_TASK_ID,
1818			"--task_name", b.Name,
1819			"--roller_name", rollerName,
1820			"--cq_keyword", canaryCQKeyword,
1821			"--target_project_base_url", targetProjectBaseURL,
1822			"--repo", specs.PLACEHOLDER_REPO,
1823			"--revision", specs.PLACEHOLDER_REVISION,
1824			"--patch_issue", specs.PLACEHOLDER_ISSUE,
1825			"--patch_set", specs.PLACEHOLDER_PATCHSET,
1826			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
1827		)
1828		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1829		b.usesLUCIAuth()
1830		b.serviceAccount(b.cfg.ServiceAccountCanary)
1831		b.timeout(3 * time.Hour)
1832		b.attempts(1)
1833	})
1834}
1835
// puppeteer generates a task that uses TaskDrivers combined with a node script and puppeteer to
// benchmark something using Chromium (e.g. CanvasKit, LottieWeb).
//
// The concrete benchmark is selected by the job's extra_config (SkottieFrames, RenderSKP, or
// CanvasPerf), and an upload task for the Perf results is always added afterwards.
func (b *jobBuilder) puppeteer() {
	compileTaskName := b.compile()
	b.addTask(b.Name, func(b *taskBuilder) {
		b.defaultSwarmDimensions()
		b.usesNode()
		b.usesLUCIAuth()
		b.dep(compileTaskName)
		b.output(OUTPUT_PERF)
		b.timeout(60 * time.Minute)
		b.cas(CAS_PUPPETEER)
		b.serviceAccount(b.cfg.ServiceAccountCompile)

		// WebGL2 is the default; jobs opt into WebGL1 via extra_config.
		webglversion := "2"
		if b.extraConfig("WebGL1") {
			webglversion = "1"
		}

		if b.extraConfig("SkottieFrames") {
			b.cmd(
				b.taskDriver("perf_puppeteer_skottie_frames", false),
				"--project_id", "skia-swarming-bots",
				"--git_hash", specs.PLACEHOLDER_REVISION,
				"--task_id", specs.PLACEHOLDER_TASK_ID,
				"--task_name", b.Name,
				"--canvaskit_bin_path", "./build",
				"--lotties_path", "./lotties_with_assets",
				"--node_bin_path", "./node/node/bin",
				"--benchmark_path", "./tools/perf-canvaskit-puppeteer",
				"--output_path", OUTPUT_PERF,
				"--os_trace", b.parts["os"],
				"--model_trace", b.parts["model"],
				"--cpu_or_gpu_trace", b.parts["cpu_or_gpu"],
				"--cpu_or_gpu_value_trace", b.parts["cpu_or_gpu_value"],
				"--webgl_version", webglversion, // ignore when running with cpu backend
			)
			b.needsLottiesWithAssets()
		} else if b.extraConfig("RenderSKP") {
			b.cmd(
				b.taskDriver("perf_puppeteer_render_skps", false),
				"--project_id", "skia-swarming-bots",
				"--git_hash", specs.PLACEHOLDER_REVISION,
				"--task_id", specs.PLACEHOLDER_TASK_ID,
				"--task_name", b.Name,
				"--canvaskit_bin_path", "./build",
				"--skps_path", "./skp",
				"--node_bin_path", "./node/node/bin",
				"--benchmark_path", "./tools/perf-canvaskit-puppeteer",
				"--output_path", OUTPUT_PERF,
				"--os_trace", b.parts["os"],
				"--model_trace", b.parts["model"],
				"--cpu_or_gpu_trace", b.parts["cpu_or_gpu"],
				"--cpu_or_gpu_value_trace", b.parts["cpu_or_gpu_value"],
				"--webgl_version", webglversion,
			)
			b.asset("skp")
		} else if b.extraConfig("CanvasPerf") { // refers to the canvas_perf.js test suite
			b.cmd(
				b.taskDriver("perf_puppeteer_canvas", false),
				"--project_id", "skia-swarming-bots",
				"--git_hash", specs.PLACEHOLDER_REVISION,
				"--task_id", specs.PLACEHOLDER_TASK_ID,
				"--task_name", b.Name,
				"--canvaskit_bin_path", "./build",
				"--node_bin_path", "./node/node/bin",
				"--benchmark_path", "./tools/perf-canvaskit-puppeteer",
				"--output_path", OUTPUT_PERF,
				"--os_trace", b.parts["os"],
				"--model_trace", b.parts["model"],
				"--cpu_or_gpu_trace", b.parts["cpu_or_gpu"],
				"--cpu_or_gpu_value_trace", b.parts["cpu_or_gpu_value"],
				"--webgl_version", webglversion,
			)
			// NOTE(review): CanvasPerf pulls the "skp" asset even though its command takes no
			// --skps_path flag; confirm whether this asset is still required.
			b.asset("skp")
		}

	})

	// Upload results to Perf after.
	// TODO(kjlubick,borenet) deduplicate this with the logic in perf().
	uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
	depName := b.Name
	b.addTask(uploadName, func(b *taskBuilder) {
		b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
		b.recipeProps(EXTRA_PROPS)
		// TODO(borenet): I'm not sure why the upload task is
		// using the Perf task name, but I've done this to
		// maintain existing behavior.
		b.Name = depName
		b.kitchenTask("upload_nano_results", OUTPUT_NONE)
		b.Name = uploadName
		b.serviceAccount(b.cfg.ServiceAccountUploadNano)
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		b.usesGSUtil()
		b.dep(depName)
	})
}
1934
1935// perf generates a Perf task.
1936func (b *jobBuilder) perf() {
1937	compileTaskName := ""
1938	// LottieWeb doesn't require anything in Skia to be compiled.
1939	if !b.extraConfig("LottieWeb") {
1940		compileTaskName = b.compile()
1941	}
1942	doUpload := !b.debug() && b.doUpload()
1943	b.addTask(b.Name, func(b *taskBuilder) {
1944		recipe := "perf"
1945		cas := CAS_PERF
1946		if b.extraConfig("PathKit") {
1947			cas = CAS_PATHKIT
1948			recipe = "perf_pathkit"
1949		} else if b.extraConfig("CanvasKit") {
1950			cas = CAS_CANVASKIT
1951			recipe = "perf_canvaskit"
1952		} else if b.extraConfig("SkottieTracing") {
1953			recipe = "perf_skottietrace"
1954		} else if b.extraConfig("SkottieWASM") {
1955			recipe = "perf_skottiewasm_lottieweb"
1956			cas = CAS_SKOTTIE_WASM
1957		} else if b.extraConfig("LottieWeb") {
1958			recipe = "perf_skottiewasm_lottieweb"
1959			cas = CAS_LOTTIE_WEB
1960		}
1961		b.recipeProps(EXTRA_PROPS)
1962		if recipe == "perf" {
1963			b.nanobenchFlags(doUpload)
1964		}
1965		b.kitchenTask(recipe, OUTPUT_PERF)
1966		b.cas(cas)
1967		b.swarmDimensions()
1968		if b.extraConfig("Docker") {
1969			b.usesDocker()
1970		}
1971		if compileTaskName != "" {
1972			b.dep(compileTaskName)
1973		}
1974		b.commonTestPerfAssets()
1975		b.expiration(20 * time.Hour)
1976		b.timeout(4 * time.Hour)
1977
1978		if b.extraConfig("Valgrind") {
1979			b.timeout(9 * time.Hour)
1980			b.expiration(48 * time.Hour)
1981			b.asset("valgrind")
1982			// Since Valgrind runs on the same bots as the CQ, we restrict Valgrind to a subset of the bots
1983			// to ensure there are always bots free for CQ tasks.
1984			b.dimension("valgrind:1")
1985		} else if b.extraConfig("MSAN") {
1986			b.timeout(9 * time.Hour)
1987		} else if b.parts["arch"] == "x86" && b.parts["configuration"] == "Debug" {
1988			// skia:6737
1989			b.timeout(6 * time.Hour)
1990		} else if b.matchOs("Mac11") {
1991			b.timeout(30 * time.Minute)
1992		}
1993
1994		if b.extraConfig("LottieWeb", "SkottieWASM") {
1995			b.asset("node", "lottie-samples")
1996		} else if b.matchExtraConfig("SkottieTracing") {
1997			b.needsLottiesWithAssets()
1998		} else if b.matchExtraConfig("Skottie") {
1999			b.asset("lottie-samples")
2000		}
2001
2002		if b.matchOs("Android") && b.cpu() {
2003			b.asset("text_blob_traces")
2004		}
2005		b.maybeAddIosDevImage()
2006
2007		iid := b.internalHardwareLabel()
2008		if iid != nil {
2009			b.Spec.Command = append(b.Spec.Command, fmt.Sprintf("internal_hardware_label=%d", *iid))
2010		}
2011	})
2012
2013	// Upload results if necessary.
2014	if doUpload {
2015		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
2016		depName := b.Name
2017		b.addTask(uploadName, func(b *taskBuilder) {
2018			b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
2019			b.recipeProps(EXTRA_PROPS)
2020			// TODO(borenet): I'm not sure why the upload task is
2021			// using the Perf task name, but I've done this to
2022			// maintain existing behavior.
2023			b.Name = depName
2024			b.kitchenTask("upload_nano_results", OUTPUT_NONE)
2025			b.Name = uploadName
2026			b.serviceAccount(b.cfg.ServiceAccountUploadNano)
2027			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
2028			b.usesGSUtil()
2029			b.dep(depName)
2030		})
2031	}
2032}
2033
2034// presubmit generates a task which runs the presubmit for this repo.
2035func (b *jobBuilder) presubmit() {
2036	b.addTask(b.Name, func(b *taskBuilder) {
2037		b.recipeProps(map[string]string{
2038			"category":         "cq",
2039			"patch_gerrit_url": "https://skia-review.googlesource.com",
2040			"patch_project":    "skia",
2041			"patch_ref":        specs.PLACEHOLDER_PATCH_REF,
2042			"reason":           "CQ",
2043			"repo_name":        "skia",
2044		})
2045		b.recipeProps(EXTRA_PROPS)
2046		b.kitchenTaskNoBundle("run_presubmit", OUTPUT_NONE)
2047		b.cas(CAS_RUN_RECIPE)
2048		b.serviceAccount(b.cfg.ServiceAccountCompile)
2049		// Use MACHINE_TYPE_LARGE because it seems to save time versus
2050		// MEDIUM and we want presubmit to be fast.
2051		b.linuxGceDimensions(MACHINE_TYPE_LARGE)
2052		b.usesGit()
2053		b.cipd(&specs.CipdPackage{
2054			Name:    "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build",
2055			Path:    "recipe_bundle",
2056			Version: "git_revision:bb122cd16700ab80bfcbd494b605dd11d4f5902d",
2057		})
2058	})
2059}
2060
2061// compileWasmGMTests uses a task driver to compile the GMs and unit tests for Web Assembly (WASM).
2062// We can use the same build for both CPU and GPU tests since the latter requires the code for the
2063// former anyway.
2064func (b *jobBuilder) compileWasmGMTests(compileName string) {
2065	b.addTask(compileName, func(b *taskBuilder) {
2066		b.attempts(1)
2067		b.usesDocker()
2068		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
2069		b.usesLUCIAuth()
2070		b.output("wasm_out")
2071		b.timeout(60 * time.Minute)
2072		b.cas(CAS_COMPILE)
2073		b.serviceAccount(b.cfg.ServiceAccountCompile)
2074		b.cache(CACHES_DOCKER...)
2075		// For now, we only have one compile mode - a GPU release mode. This should be sufficient to
2076		// run CPU, WebGL1, and WebGL2 tests. Debug mode is not needed for the waterfall because
2077		// when using puppeteer, stacktraces from exceptions are hard to get access to, so we do not
2078		// even bother.
2079		b.cmd(
2080			b.taskDriver("compile_wasm_gm_tests", false),
2081			"--project_id", "skia-swarming-bots",
2082			"--task_id", specs.PLACEHOLDER_TASK_ID,
2083			"--task_name", compileName,
2084			"--out_path", "./wasm_out",
2085			"--skia_path", "./skia",
2086			"--work_path", "./cache/docker/wasm_gm",
2087		)
2088	})
2089}
2090
// runWasmGMTests uses a task driver to run the pre-built WASM GMs and unit tests in a browser
// via the run-wasm-gm-tests harness and node, reporting GM results via goldctl. It depends on
// the corresponding compile task for this job.
func (b *jobBuilder) runWasmGMTests() {
	compileTaskName := b.compile()

	b.addTask(b.Name, func(b *taskBuilder) {
		b.attempts(1)
		b.usesNode()
		b.swarmDimensions()
		b.usesLUCIAuth()
		b.cipd(CIPD_PKGS_GOLDCTL)
		b.dep(compileTaskName)
		b.timeout(60 * time.Minute)
		b.cas(CAS_WASM_GM)
		b.serviceAccount(b.cfg.ServiceAccountUploadGM)
		b.cmd(
			b.taskDriver("run_wasm_gm_tests", false),
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--test_harness_path", "./tools/run-wasm-gm-tests",
			"--built_path", "./wasm_out",
			"--node_bin_path", "./node/node/bin",
			"--resource_path", "./resources",
			"--work_path", "./wasm_gm/work",
			"--gold_ctl_path", "./cipd_bin_packages/goldctl",
			"--gold_hashes_url", b.cfg.GoldHashesURL,
			"--git_commit", specs.PLACEHOLDER_REVISION,
			"--changelist_id", specs.PLACEHOLDER_ISSUE,
			"--patchset_order", specs.PLACEHOLDER_PATCHSET,
			"--tryjob_id", specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID,
			// TODO(kjlubick, nifong) Make these not hard coded if we change the configs we test on.
			"--webgl_version", "2", // 0 means CPU ; this flag controls cpu_or_gpu and extra_config
			"--gold_key", "alpha_type:Premul",
			"--gold_key", "arch:wasm",
			"--gold_key", "browser:Chrome",
			"--gold_key", "color_depth:8888",
			"--gold_key", "config:gles",
			"--gold_key", "configuration:Release",
			"--gold_key", "cpu_or_gpu_value:QuadroP400",
			"--gold_key", "model:Golo",
			"--gold_key", "os:Ubuntu18",
		)
	})
}
2137
// labelAndSavedOutputDir contains a Bazel label (e.g. //tests:some_test) and a //bazel-bin
// subdirectory that should be stored into CAS.
type labelAndSavedOutputDir struct {
	label          string // Absolute Bazel label or target pattern, e.g. "//tests:some_test".
	savedOutputDir string // Subdirectory of //bazel-bin to store into CAS; empty means nothing is saved.
}
2144
// Maps a shorthand version of a label (which can be an arbitrary string) to an absolute Bazel
// label or "target pattern" https://bazel.build/docs/build#specifying-build-targets
// The reason we need this mapping is because Buildbucket build names cannot have / or : in them.
// An empty savedOutputDir means no //bazel-bin outputs are stored into CAS for that target.
// TODO(borenet/kjlubick): Is there a way to generate a mapping using `bazel query`?
var shorthandToLabel = map[string]labelAndSavedOutputDir{
	"all_tests":                  {"//tests:linux_rbe_tests", ""},
	"core":                       {"//:core", ""},
	"cpu_8888_benchmark_test":    {"//bench:cpu_8888_test", ""},
	"cpu_gms":                    {"//gm:cpu_gm_tests", ""},
	"full_library":               {"//tools:full_build", ""},
	"ganesh_gl":                  {"//:ganesh_gl", ""},
	"hello_bazel_world_test":     {"//gm:hello_bazel_world_test", ""},
	"modules_canvaskit":          {"//modules/canvaskit:canvaskit", ""},
	"modules_canvaskit_js_tests": {"//modules/canvaskit:canvaskit_js_tests", ""},
	"skottie_tool_gpu":           {"//modules/skottie:skottie_tool_gpu", ""},
	"viewer":                     {"//tools/viewer:viewer", ""},
	"decode_everything":          {"//example/external_client:decode_everything", ""},
	"path_combiner":              {"//example/external_client:path_combiner", ""},
	"png_decoder":                {"//example/external_client:png_decoder", ""},
	"shape_text":                 {"//example/external_client:shape_text", ""},
	"svg_with_harfbuzz":          {"//example/external_client:svg_with_harfbuzz", ""},
	"svg_with_primitive":         {"//example/external_client:svg_with_primitive", ""},
	"use_ganesh_gl":              {"//example/external_client:use_ganesh_gl", ""},
	"use_ganesh_vulkan":          {"//example/external_client:use_ganesh_vulkan", ""},
	"use_graphite_native_vulkan": {"//example/external_client:use_graphite_native_vulkan", ""},
	"use_skresources":            {"//example/external_client:use_skresources", ""},
	"write_text_to_png":          {"//example/external_client:write_text_to_png", ""},
	"write_to_pdf":               {"//example/external_client:write_to_pdf", ""},

	// Currently there is no way to tell Bazel "only test go_test targets", so we must group them
	// under a test_suite.
	//
	// Alternatives:
	//
	// - Use --test_lang_filters, which currently does not work for non-native rules. See
	//   https://github.com/bazelbuild/bazel/issues/12618.
	//
	// - As suggested in the same GitHub issue, "bazel query 'kind(go_test, //...)'" would normally
	//   return the list of labels. However, this fails due to BUILD.bazel files in
	//   //third_party/externals and //bazel/external/vello. We could try either fixing those files
	//   when possible, or adding them to //.bazelignore (either permanently or temporarily inside a
	//   specialized task driver just for Go tests).
	//
	// - Have Gazelle add a tag to all Go tests: go_test(name = "foo_test", tag = "go", ... ). Then,
	//   we can use a wildcard label such as //... and tell Bazel to only test those targets with
	//   said tag, e.g. "bazel test //... --test_tag_filters=go"
	//   (https://bazel.build/reference/command-line-reference#flag--test_tag_filters). Today this
	//   does not work due to the third party and external BUILD.bazel files mentioned in the
	//   previous bullet point.
	"all_go_tests": {"//:all_go_tests", ""},

	// Android tests that run on a device. We store the //bazel-bin/tests directory into CAS for use
	// by subsequent CI tasks.
	"android_math_test":               {"//tests:android_math_test", "tests"},
	"hello_bazel_world_android_test":  {"//gm:hello_bazel_world_android_test", "gm"},
	"cpu_8888_benchmark_android_test": {"//bench:cpu_8888_android_test", "bench"},
}
2202
// bazelBuild adds a task which builds the specified single-target label (//foo:bar) or
// multi-target label (//foo/...) using Bazel. Depending on the host we run this on, we may
// specify additional Bazel args to build faster. Optionally, a subset of the //bazel-bin directory
// will be stored into CAS for use by subsequent tasks.
//
// Panics if the job name references an unknown label shorthand or an unsupported host.
func (b *jobBuilder) bazelBuild() {
	shorthand, config, host := b.parts.bazelBuildParts()
	// Note: this local deliberately shadows the labelAndSavedOutputDir type of the same name.
	labelAndSavedOutputDir, ok := shorthandToLabel[shorthand]
	if !ok {
		panic("unsupported Bazel label shorthand " + shorthand)
	}

	b.addTask(b.Name, func(b *taskBuilder) {
		bazelCacheDir, ok := map[string]string{
			// We only run builds in GCE.
			"linux_x64":   bazelCacheDirOnGCELinux,
			"windows_x64": bazelCacheDirOnWindows,
		}[host]
		if !ok {
			panic("unknown Bazel cache dir for Bazel host " + host)
		}

		// Bazel git_repository rules shell out to git. Use the version from
		// CIPD to ensure that we're not using an old locally-installed version.
		b.usesGit()
		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")

		// Base task driver invocation; host-specific Bazel args are appended below.
		cmd := []string{
			b.taskDriver("bazel_build", host != "windows_x64"),
			"--project_id=skia-swarming-bots",
			"--task_id=" + specs.PLACEHOLDER_TASK_ID,
			"--task_name=" + b.Name,
			"--bazel_label=" + labelAndSavedOutputDir.label,
			"--bazel_config=" + config,
			"--bazel_cache_dir=" + bazelCacheDir,
			"--workdir=./skia",
		}

		if labelAndSavedOutputDir.savedOutputDir != "" {
			cmd = append(cmd,
				"--out_path="+OUTPUT_BAZEL,
				// Which //bazel-bin subdirectory to copy into the output dir (flag --out_path).
				"--saved_output_dir="+labelAndSavedOutputDir.savedOutputDir,
			)
		}

		if host == "linux_x64" {
			b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
			b.usesBazel("linux_x64")
			if labelAndSavedOutputDir.savedOutputDir != "" {
				// We assume that builds which require storing a subset of //bazel-bin to CAS are Android
				// builds. We want such builds to use RBE, and we want to download the built top-level
				// artifacts. Also, we need the adb_test runner to be cross-compiled to run on a Raspberry
				// Pi.
				cmd = append(cmd, "--bazel_arg=--config=linux_rbe")
				cmd = append(cmd, "--bazel_arg=--jobs=100")
				cmd = append(cmd, "--bazel_arg=--remote_download_toplevel")
				cmd = append(cmd, "--bazel_arg=--adb_platform=linux_arm64")
			} else {
				// We want all Linux Bazel Builds to use RBE
				cmd = append(cmd, "--bazel_arg=--config=for_linux_x64_with_rbe")
				cmd = append(cmd, "--bazel_arg=--jobs=100")
				cmd = append(cmd, "--bazel_arg=--remote_download_minimal")
			}
		} else if host == "windows_x64" {
			b.dimension(
				"cpu:x86-64-Haswell_GCE",
				"gpu:none",
				fmt.Sprintf("machine_type:%s", MACHINE_TYPE_LARGE),
				fmt.Sprintf("os:%s", DEFAULT_OS_WIN_GCE),
				"pool:Skia",
			)
			b.usesBazel("windows_x64")
			cmd = append(cmd, "--bazel_arg=--experimental_scale_timeouts=2.0")
		} else {
			panic("unsupported Bazel host " + host)
		}
		b.cmd(cmd...)

		b.idempotent()
		b.cas(CAS_BAZEL)
		b.attempts(1)
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		if labelAndSavedOutputDir.savedOutputDir != "" {
			b.output(OUTPUT_BAZEL)
		}
	})
}
2290
// precompiledBazelTestKind indicates which kind of precompiled Bazel test a
// bazel_test_precompiled task should run (see bazelTest).
type precompiledBazelTestKind int

const (
	// precompiledBazelTestNone means the task does not run a precompiled test.
	precompiledBazelTestNone precompiledBazelTestKind = iota
	// precompiledBenchmarkTest runs a precompiled benchmark.
	precompiledBenchmarkTest
	// precompiledGMTest runs a precompiled GM, reporting results via goldctl.
	precompiledGMTest
	// precompiledUnitTest runs a precompiled unit test.
	precompiledUnitTest
)
2299
// bazelTest generates a task which runs tests (GMs, benchmarks, unit tests, CanvasKit Gold
// tests, etc.) with Bazel, dispatching to one of several task drivers based on the parts of the
// job name. Precompiled variants depend on the corresponding BazelBuild task and run the
// binaries it stored into CAS.
//
// Panics if the job name references an unknown label shorthand, device-specific config, task
// driver, or host.
func (b *jobBuilder) bazelTest() {
	taskdriverName, shorthand, buildConfig, host, testConfig := b.parts.bazelTestParts()
	// Note: this local deliberately shadows the labelAndSavedOutputDir type of the same name.
	labelAndSavedOutputDir, ok := shorthandToLabel[shorthand]
	if !ok {
		panic("unsupported Bazel label shorthand " + shorthand)
	}

	// Expand task driver name to keep task names short.
	precompiledKind := precompiledBazelTestNone
	if taskdriverName == "precompiled_benchmark" {
		taskdriverName = "bazel_test_precompiled"
		precompiledKind = precompiledBenchmarkTest
	}
	if taskdriverName == "precompiled_gm" {
		taskdriverName = "bazel_test_precompiled"
		precompiledKind = precompiledGMTest
	}
	if taskdriverName == "precompiled_test" {
		taskdriverName = "bazel_test_precompiled"
		precompiledKind = precompiledUnitTest
	}
	if taskdriverName == "gm" {
		taskdriverName = "bazel_test_gm"
	}
	if taskdriverName == "benchmark" {
		taskdriverName = "bazel_test_benchmark"
	}

	// Resolve the device-specific Bazel config named by the job, if any.
	var deviceSpecificBazelConfig *device_specific_configs.Config
	if testConfig != "" {
		if config, ok := device_specific_configs.Configs[testConfig]; ok {
			deviceSpecificBazelConfig = &config
		} else {
			panic(fmt.Sprintf("Unknown device-specific Bazel config: %q", testConfig))
		}
	}

	// Non-GCE (Skolo) devices use a different Bazel cache location.
	bazelCacheDir := bazelCacheDirOnGCELinux
	if deviceSpecificBazelConfig != nil && deviceSpecificBazelConfig.Keys["model"] != "GCE" {
		bazelCacheDir = bazelCacheDirOnSkoloLinux
	}

	b.addTask(b.Name, func(b *taskBuilder) {
		// Flags common to all task drivers; driver-specific flags are appended below.
		cmd := []string{
			b.taskDriver(taskdriverName, false),
			"--project_id=skia-swarming-bots",
			"--task_id=" + specs.PLACEHOLDER_TASK_ID,
			"--task_name=" + b.Name,
			"--workdir=.",
		}

		switch taskdriverName {
		case "canvaskit_gold":
			cmd = append(cmd,
				"--bazel_label="+labelAndSavedOutputDir.label,
				"--bazel_config="+buildConfig,
				"--bazel_cache_dir="+bazelCacheDir,
				"--goldctl_path=./cipd_bin_packages/goldctl",
				"--git_commit="+specs.PLACEHOLDER_REVISION,
				"--changelist_id="+specs.PLACEHOLDER_ISSUE,
				"--patchset_order="+specs.PLACEHOLDER_PATCHSET,
				"--tryjob_id="+specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID)
			b.cipd(CIPD_PKGS_GOLDCTL)
			// Gold keys describing the test environment are derived from the build config.
			switch buildConfig {
			case "ck_full_cpu_release_chrome":
				cmd = append(cmd, "--cpu_or_gpu=CPU", "--cpu_or_gpu_value=CPU",
					"--compilation_mode=Release", "--browser=Chrome")
			case "ck_full_webgl2_release_chrome":
				cmd = append(cmd, "--cpu_or_gpu=GPU", "--cpu_or_gpu_value=WebGL2",
					"--compilation_mode=Release", "--browser=Chrome")
			default:
				panic("Gold keys not specified for config " + buildConfig)
			}

		case "cpu_tests":
			cmd = append(cmd,
				"--bazel_label="+labelAndSavedOutputDir.label,
				"--bazel_config="+buildConfig,
				"--bazel_cache_dir="+bazelCacheDir)

		case "toolchain_layering_check":
			cmd = append(cmd,
				"--bazel_label="+labelAndSavedOutputDir.label,
				"--bazel_config="+buildConfig,
				"--bazel_cache_dir="+bazelCacheDir)

		case "bazel_test_precompiled":
			// Compute the file name of the test based on its Bazel label. The file name will be relative to
			// the bazel-bin directory, which we receive a subset of as a CAS input.
			command := strings.ReplaceAll(labelAndSavedOutputDir.label, "//", "")
			command = strings.ReplaceAll(command, ":", "/")
			command = path.Join(OUTPUT_BAZEL, command)

			// The test's working directory will be its runfiles directory, which simulates the behavior of
			// the "bazel run" command.
			commandWorkDir := path.Join(command+".runfiles", "skia")

			cmd = append(cmd,
				"--command="+command,
				"--command_workdir="+commandWorkDir)

			switch precompiledKind {
			case precompiledBenchmarkTest:
				cmd = append(cmd,
					"--kind=benchmark",
					"--git_commit="+specs.PLACEHOLDER_REVISION,
					"--changelist_id="+specs.PLACEHOLDER_ISSUE,
					"--patchset_order="+specs.PLACEHOLDER_PATCHSET)

			case precompiledGMTest:
				cmd = append(cmd,
					"--kind=gm",
					"--bazel_label="+labelAndSavedOutputDir.label,
					"--goldctl_path=./cipd_bin_packages/goldctl",
					"--git_commit="+specs.PLACEHOLDER_REVISION,
					"--changelist_id="+specs.PLACEHOLDER_ISSUE,
					"--patchset_order="+specs.PLACEHOLDER_PATCHSET,
					"--tryjob_id="+specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID)
				b.cipd(CIPD_PKGS_GOLDCTL)

			case precompiledUnitTest:
				cmd = append(cmd, "--kind=unit")

			default:
				panic(fmt.Sprintf("Unknown precompiled test kind: %v", precompiledKind))
			}

		case "bazel_test_gm":
			cmd = append(cmd,
				"--bazel_label="+labelAndSavedOutputDir.label,
				"--bazel_config="+buildConfig,
				"--bazel_cache_dir="+bazelCacheDir,
				"--goldctl_path=./cipd_bin_packages/goldctl",
				"--git_commit="+specs.PLACEHOLDER_REVISION,
				"--changelist_id="+specs.PLACEHOLDER_ISSUE,
				"--patchset_order="+specs.PLACEHOLDER_PATCHSET,
				"--tryjob_id="+specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID)
			b.cipd(CIPD_PKGS_GOLDCTL)

		case "bazel_test_benchmark":
			// Note that these tasks run on Skolo machines.
			// NOTE(review): this uses bazelCacheDirOnSkoloLinux directly instead of the
			// bazelCacheDir computed above - confirm this is intentional for GCE-model configs.
			cmd = append(cmd,
				"--bazel_label="+labelAndSavedOutputDir.label,
				"--bazel_config="+buildConfig,
				"--bazel_cache_dir="+bazelCacheDirOnSkoloLinux,
				"--git_commit="+specs.PLACEHOLDER_REVISION,
				"--changelist_id="+specs.PLACEHOLDER_ISSUE,
				"--patchset_order="+specs.PLACEHOLDER_PATCHSET)

		case "external_client":
			// For external_client, we want to test how an external user would
			// build using Skia. Therefore, we change to the workspace in that
			// directory and use labels relative to it.
			pathInSkia := "example/external_client"
			label := strings.Replace(labelAndSavedOutputDir.label, pathInSkia, "", -1)
			cmd = append(cmd,
				"--bazel_label="+label,
				"--path_in_skia="+pathInSkia,
				"--bazel_cache_dir="+bazelCacheDir)
			b.usesDocker()

		default:
			panic("Unsupported Bazel taskdriver " + taskdriverName)
		}

		if deviceSpecificBazelConfig != nil {
			cmd = append(cmd, "--device_specific_bazel_config="+deviceSpecificBazelConfig.Name)
		}

		if host == "linux_x64" {
			b.usesBazel("linux_x64")
		} else if host == "linux_arm64" || host == "on_rpi" {
			// The RPIs do not run Bazel directly, they have precompiled binary
			// to run instead.
		} else {
			panic("unsupported Bazel host " + host)
		}

		if taskdriverName == "bazel_test_gm" ||
			taskdriverName == "bazel_test_benchmark" ||
			taskdriverName == "bazel_test_precompiled" {
			if taskdriverName == "bazel_test_precompiled" {
				// This task precompiles the test and stores it to CAS.
				b.dep(fmt.Sprintf("BazelBuild-%s-%s-linux_x64", shorthand, buildConfig))
			}

			// Set dimensions. Device-running tasks require the device-specific config to provide
			// the Swarming dimensions; sort them for deterministic output.
			if deviceSpecificBazelConfig == nil {
				log.Fatalf("While processing job %q: task driver %q requires a device-specific Bazel config.", b.Name, taskdriverName)
			}
			if len(deviceSpecificBazelConfig.SwarmingDimensions) == 0 {
				log.Fatalf("While processing job %q: device-specific Bazel config %q does not provide Swarming dimensions.", b.Name, deviceSpecificBazelConfig.Name)
			}
			var dimensions []string
			for name, value := range deviceSpecificBazelConfig.SwarmingDimensions {
				dimensions = append(dimensions, fmt.Sprintf("%s:%s", name, value))
			}
			dimensions = append(dimensions, fmt.Sprintf("pool:%s", b.cfg.Pool))
			sort.Strings(dimensions)
			b.dimension(dimensions...)
		} else {
			b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
		}

		b.cmd(cmd...)
		b.idempotent()
		b.cas(CAS_BAZEL)
		b.attempts(1)
		b.serviceAccount(b.cfg.ServiceAccountCompile)
	})
}
2511