xref: /aosp_15_r20/external/toolchain-utils/afdo_tools/update_kernel_afdo.py (revision 760c253c1ed00ce9abd48f8546f08516e57485fe)
1#!/usr/bin/env python3
2# Copyright 2024 The ChromiumOS Authors
3# Use of this source code is governed by a BSD-style license that can be
4# found in the LICENSE file.
5
6"""This script updates kernel profiles based on what's available in gs://.
7
8It supports updating on canary, stable, and beta branches.
9"""
10
11import argparse
12import dataclasses
13import datetime
14import enum
15import json
16import logging
17import os
18from pathlib import Path
19import re
20import shlex
21import subprocess
22import sys
23from typing import Dict, Iterable, List, Optional, Tuple
24
25from cros_utils import git_utils
26
27
# Folks who should be on the R-line of any CLs that get uploaded.
# Consumed by upload_head_to_gerrit() below.
CL_REVIEWERS = (git_utils.REVIEWER_DETECTIVE,)

# Folks who should be on the CC-line of any CLs that get uploaded.
# NOTE(review): these addresses appear redacted in this listing — confirm the
# real values upstream before relying on them.
CL_CC = (
    "[email protected]",
    "[email protected]",
)

# Determine which gsutil to use.
# 'gsutil.py' is provided by depot_tools, whereas 'gsutil'
# is provided by either https://cloud.google.com/sdk/docs/install, or
# the 'google-cloud-cli' package. Since we need depot_tools to even
# use 'repo', 'gsutil.py' is guaranteed to exist.
GSUTIL = "gsutil.py"
43
44
class Arch(enum.Enum):
    """An enum for CPU architectures."""

    AMD64 = "amd64"
    ARM = "arm"

    @property
    def cwp_gs_location(self) -> str:
        """Returns the location in gs:// where these profiles live."""
        locations = {
            Arch.AMD64: "gs://chromeos-prebuilt/afdo-job/vetted/kernel/amd64",
            Arch.ARM: "gs://chromeos-prebuilt/afdo-job/vetted/kernel/arm",
        }
        location = locations.get(self)
        assert location is not None, (
            f"Uncovered arch -> gs:// mapping for {self}"
        )
        return location
59
60
@dataclasses.dataclass(frozen=True, eq=True, order=True)
class KernelVersion:
    """A class representing a version of the kernel."""

    # Major version, e.g. the 5 in "5.15".
    major: int
    # Minor version, e.g. the 15 in "5.15".
    minor: int

    def __str__(self):
        return f"{self.major}.{self.minor}"

    @classmethod
    def parse(cls, val: str) -> "KernelVersion":
        """Parses a "major.minor" string into a KernelVersion.

        Raises:
            ValueError: if `val` isn't of the form "major.minor".
        """
        # N.B. The `.` must be escaped; an unescaped `.` matches any
        # character, which would accept strings like "5x15".
        m = re.fullmatch(r"(\d+)\.(\d+)", val)
        if not m:
            raise ValueError(f"{val!r} is an invalid kernel version")
        return cls(major=int(m.group(1)), minor=int(m.group(2)))
77
78
# Versions that rolling should be skipped on, for one reason or another.
# Maps a milestone (release number) to the (arch, kernel version) pairs that
# update_afdo_for_channel() should not touch on that milestone.
SKIPPED_VERSIONS: Dict[int, Iterable[Tuple[Arch, KernelVersion]]] = {
    # Kernel tracing was disabled on ARM in 114, b/275560674
    114: ((Arch.ARM, KernelVersion(5, 15)),),
    # Carried forward to 115 — presumably the same b/275560674 issue; confirm.
    115: ((Arch.ARM, KernelVersion(5, 15)),),
}
85
86
class Channel(enum.Enum):
    """An enum that discusses channels."""

    # Ordered from closest-to-ToT to farthest-from-ToT
    CANARY = "canary"
    BETA = "beta"
    STABLE = "stable"

    @classmethod
    def parse(cls, val: str) -> "Channel":
        """Maps a channel name like "beta" to its Channel member."""
        found = next((channel for channel in cls if channel.value == val), None)
        if found is None:
            raise ValueError(
                f"No such channel: {val!r}; try one of {[x.value for x in cls]}"
            )
        return found
103
104
@dataclasses.dataclass(frozen=True)
class ProfileSelectionInfo:
    """Preferences about profiles to select."""

    # A consistent timestamp for the program to run with. Compared against
    # profile gs:// upload timestamps, so it should be timezone-aware UTC.
    now: datetime.datetime

    # Maximum age of a profile that can be selected. Older profiles are still
    # landed, but are reported as failures.
    max_profile_age: datetime.timedelta
114
115
def get_parser() -> argparse.ArgumentParser:
    """Returns an argument parser for this script."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument(
        "--debug",
        action="store_true",
        help="Enable debug logging.",
    )
    parser.add_argument(
        "--upload",
        action="store_true",
        help="Automatically upload all changes that were made.",
    )
    parser.add_argument(
        "--fetch",
        action="store_true",
        help="Run `git fetch` in toolchain-utils prior to running.",
    )
    parser.add_argument(
        "--max-age-days",
        type=int,
        default=10,
        help="""
        The maximum number of days old a kernel profile can be before
        it's ignored by this script. Default: %(default)s
        """,
    )
    parser.add_argument(
        "--chromeos-tree",
        type=Path,
        help="""
        Root of a ChromeOS tree. This is optional to pass in, but doing so
        unlocks extra convenience features on `--upload`. This script will try
        to autodetect a tree if this isn't specified.
        """,
    )
    # Positional; defaults to every channel when none are given.
    parser.add_argument(
        "channel",
        nargs="*",
        type=Channel.parse,
        default=list(Channel),
        help=f"""
        Channel(s) to update. If none are passed, this will update all
        channels. Choose from {[x.value for x in Channel]}.
        """,
    )
    return parser
166
167
@dataclasses.dataclass(frozen=True, eq=True, order=True)
class GitBranch:
    """Represents a ChromeOS branch."""

    # Git remote the branch lives on, as parsed from `git branch -r` output.
    remote: str
    # Milestone number, e.g. the 123 in "release-R123-45678.B".
    release_number: int
    # Branch name, e.g. "release-R123-45678.B", or "main" for canary.
    branch_name: str
175
176
def git_checkout(git_dir: Path, branch: GitBranch) -> None:
    """Checks out `branch` (as `remote/branch_name`) in `git_dir`."""
    checkout_cmd = [
        "git",
        "checkout",
        "--quiet",
        f"{branch.remote}/{branch.branch_name}",
    ]
    subprocess.run(
        checkout_cmd,
        check=True,
        cwd=git_dir,
        stdin=subprocess.DEVNULL,
    )
189
190
def git_fetch(git_dir: Path) -> None:
    """Runs `git fetch` in the repo at `git_dir`."""
    subprocess.run(
        ("git", "fetch"),
        check=True,
        cwd=git_dir,
        stdin=subprocess.DEVNULL,
    )
198
199
def git_rev_parse(git_dir: Path, ref_or_sha: str) -> str:
    """Resolves `ref_or_sha` to a SHA via `git rev-parse` in `git_dir`."""
    result = subprocess.run(
        ["git", "rev-parse", ref_or_sha],
        check=True,
        cwd=git_dir,
        stdin=subprocess.DEVNULL,
        stdout=subprocess.PIPE,
        encoding="utf-8",
    )
    return result.stdout.strip()
209
210
def autodetect_branches(toolchain_utils: Path) -> Dict[Channel, GitBranch]:
    """Returns GitBranches for each branch type in toolchain_utils."""
    listing = subprocess.run(
        ["git", "branch", "-r"],
        cwd=toolchain_utils,
        check=True,
        stdin=subprocess.DEVNULL,
        stdout=subprocess.PIPE,
        encoding="utf-8",
    ).stdout

    # Match "${remote}/release-R${branch_number}-${build}.B"
    release_branch_re = re.compile(r"([^/]+)/(release-R(\d+)-\d+\.B)")
    release_branches = []
    for raw_line in listing.splitlines():
        match = release_branch_re.fullmatch(raw_line.strip())
        if match:
            remote, name, number = match.groups()
            release_branches.append(GitBranch(remote, int(number), name))

    release_branches.sort(key=lambda branch: branch.release_number)
    if len(release_branches) < 2:
        raise ValueError(
            f"Expected at least two branches, but only found "
            f"{len(release_branches)}"
        )

    # The two highest release branches are stable and beta; canary is one
    # milestone past beta, and lives on `main`.
    beta = release_branches[-1]
    stable = release_branches[-2]
    canary = GitBranch(
        remote=beta.remote,
        release_number=beta.release_number + 1,
        branch_name="main",
    )
    return {
        Channel.CANARY: canary,
        Channel.BETA: beta,
        Channel.STABLE: stable,
    }
253
254
@dataclasses.dataclass(frozen=True, eq=True, order=True)
class ArchUpdateConfig:
    """The AFDO update config for one architecture."""

    # Kernel versions whose profiles should be rolled for this arch.
    versions_to_track: List[KernelVersion]
    # Absolute path to the JSON descriptor file that maps kernel versions to
    # profile names (see read_afdo_descriptor_file).
    metadata_file: Path
261
262
def read_update_cfg_file(
    toolchain_utils: Path, file_path: Path
) -> Dict[Arch, ArchUpdateConfig]:
    """Reads `update_kernel_afdo.cfg`."""
    # These files were originally meant to be `source`d in bash, and are very
    # simple. These are read from branches, so we'd need cherry-picks to go
    # back and replace them with a singular format. Could be nice to move to
    # JSON or something.

    # Parse assignments that look like `FOO="bar"`. No escaping or variable
    # expansion is supported.
    assignment_re = re.compile(
        r'^([a-zA-Z_0-9]+)="([^"]*)"(?:\s*#.*)?', re.MULTILINE
    )
    # findall with two groups yields (variable_name, variable_value) pairs.
    settings = dict(assignment_re.findall(file_path.read_text(encoding="utf-8")))
    logging.debug("Parsing cfg file gave back settings: %s", settings)

    results = {}
    for arch, var_prefix in ((Arch.AMD64, "AMD"), (Arch.ARM, "ARM")):
        # `${prefix}_KVERS` is a space-separated list of kernel versions.
        versions = [
            KernelVersion.parse(raw)
            for raw in settings[f"{var_prefix}_KVERS"].split()
        ]
        results[arch] = ArchUpdateConfig(
            versions_to_track=versions,
            metadata_file=toolchain_utils / settings[f"{var_prefix}_METADATA_FILE"],
        )
    return results
300
301
@dataclasses.dataclass(frozen=True, eq=True)
class KernelGsProfile:
    """Represents a kernel profile in gs://."""

    release_number: int
    chrome_build: str
    cwp_timestamp: int
    suffix: str
    gs_timestamp: datetime.datetime

    _FILE_NAME_PARSE_RE = re.compile(r"R(\d+)-(\d+\.\d+)-(\d+)(\..+\..+)")

    @property
    def file_name_no_suffix(self):
        """The profile's file name with the trailing suffix stripped."""
        pieces = (
            f"R{self.release_number}",
            self.chrome_build,
            str(self.cwp_timestamp),
        )
        return "-".join(pieces)

    @property
    def file_name(self):
        """The profile's full file name, suffix included."""
        return self.file_name_no_suffix + self.suffix

    @classmethod
    def from_file_name(
        cls, timestamp: datetime.datetime, file_name: str
    ) -> "KernelGsProfile":
        """Parses a gs:// object name into a KernelGsProfile.

        Raises:
            ValueError: if `file_name` doesn't look like a profile name.
        """
        match = cls._FILE_NAME_PARSE_RE.fullmatch(file_name)
        if match is None:
            raise ValueError(f"{file_name!r} doesn't parse as a profile name")
        release_number, chrome_build, cwp_timestamp, suffix = match.groups()
        return cls(
            release_number=int(release_number),
            chrome_build=chrome_build,
            cwp_timestamp=int(cwp_timestamp),
            suffix=suffix,
            gs_timestamp=timestamp,
        )
339
340
def datetime_from_gs_time(timestamp_str: str) -> datetime.datetime:
    """Parses a datetime from gs."""
    # gs reports UTC times with a trailing 'Z'; mark the result tz-aware.
    naive = datetime.datetime.strptime(timestamp_str, "%Y-%m-%dT%H:%M:%SZ")
    return naive.replace(tzinfo=datetime.timezone.utc)
346
347
class KernelProfileFetcher:
    """Fetches kernel profiles from gs://. Caches results."""

    def __init__(self):
        # gs_url -> profiles previously listed at that URL.
        self._cached_results: Dict[str, List[KernelGsProfile]] = {}

    @staticmethod
    def _parse_gs_stdout(stdout: str) -> List[KernelGsProfile]:
        """Parses `gsutil ls -l` output into KernelGsProfiles."""
        line_re = re.compile(r"\s*\d+\s+(\S+T\S+)\s+(gs://.+)")
        profiles = []
        # Ignore the last line, since that's "TOTAL:"
        for raw_line in stdout.splitlines()[:-1]:
            line = raw_line.strip()
            if not line:
                continue
            match = line_re.fullmatch(line)
            if match is None:
                raise ValueError(f"Unexpected line from gs: {line!r}")
            timestamp_str, gs_url = match.groups()
            profiles.append(
                KernelGsProfile.from_file_name(
                    datetime_from_gs_time(timestamp_str),
                    os.path.basename(gs_url),
                )
            )
        return profiles

    @classmethod
    def _fetch_impl(cls, gs_url: str) -> List[KernelGsProfile]:
        """Lists `gs_url` via gsutil; empty list if nothing matched."""
        cmd = [GSUTIL, "ls", "-l", gs_url]
        result = subprocess.run(
            cmd,
            check=False,
            stdin=subprocess.DEVNULL,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            encoding="utf-8",
        )

        if result.returncode:
            # If nothing could be found, gsutil will exit after printing this.
            if "One or more URLs matched no objects." in result.stderr:
                return []
            logging.error(
                "%s failed; stderr:\n%s", shlex.join(cmd), result.stderr
            )
            result.check_returncode()
            assert False, "unreachable"

        return cls._parse_gs_stdout(result.stdout)

    def fetch(self, gs_url: str) -> List[KernelGsProfile]:
        """Returns profiles at `gs_url`, consulting the cache first."""
        if gs_url not in self._cached_results:
            logging.info("Fetching profiles from %s...", gs_url)
            fetched = self._fetch_impl(gs_url)
            logging.info("Found %d profiles in %s", len(fetched), gs_url)
            self._cached_results[gs_url] = fetched

        # Create a copy to keep mutations from causing problems.
        # KernelGsProfiles are frozen, at least.
        return list(self._cached_results[gs_url])
413
414
def find_newest_afdo_artifact(
    fetcher: KernelProfileFetcher,
    arch: Arch,
    kernel_version: KernelVersion,
    release_number: int,
) -> Optional[KernelGsProfile]:
    """Returns info about the latest AFDO artifact for the given parameters."""
    profile_dir = os.path.join(arch.cwp_gs_location, str(kernel_version))
    all_profiles = fetcher.fetch(profile_dir)
    if not all_profiles:
        logging.error(
            "Failed to find any kernel profiles in %s", profile_dir
        )
        return None

    matching_profiles = [
        profile
        for profile in all_profiles
        if profile.release_number == release_number
    ]
    if not matching_profiles:
        logging.warning(
            "Failed to find any M%d kernel profiles in %s",
            release_number,
            profile_dir,
        )
        return None

    # We want the most recently uploaded profile, since that should correspond
    # with the newest profile. If there're multiple profiles for some reason,
    # choose what _should_ be a consistent tie-breaker.
    def upload_recency(profile):
        return (profile.gs_timestamp, profile.cwp_timestamp, profile.chrome_build)

    return max(matching_profiles, key=upload_recency)
449
450
def read_afdo_descriptor_file(path: Path) -> Dict[KernelVersion, str]:
    """Reads the AFDO descriptor file.

    "AFDO descriptor file" is jargon to refer to the actual JSON file that PUpr
    monitors.

    Args:
        path: Path to the descriptor JSON file. A missing file is treated as
            an empty descriptor.

    Returns:
        A map of kernel version -> profile gs name.

    Raises:
        ValueError: if a key doesn't match `chromeos-kernel-${major}_${minor}`.
    """
    try:
        with path.open(encoding="utf-8") as f:
            raw_contents = json.load(f)
    except FileNotFoundError:
        return {}

    # The format of this is:
    # {
    #   "chromeos-kernel-${major}_${minor}": {
    #     "name": "${profile_gs_name}",
    #   }
    # }
    #
    # N.B. The major group must be `(\d+)`, not `(\d)+`: a repeated
    # single-character group only captures its final repetition, so a
    # multi-digit major version (e.g. "10") would be read back as its last
    # digit.
    key_re = re.compile(r"^chromeos-kernel-(\d+)_(\d+)$")
    result = {}
    for kernel_key, val in raw_contents.items():
        m = key_re.fullmatch(kernel_key)
        if not m:
            raise ValueError(f"Invalid key in JSON: {kernel_key}")
        major, minor = m.groups()
        version = KernelVersion(major=int(major), minor=int(minor))
        result[version] = val["name"]
    return result
479
480
def write_afdo_descriptor_file(
    path: Path, contents: Dict[KernelVersion, str]
) -> bool:
    """Writes the file at path with the given contents.

    Returns:
        True if the file was written due to changes, False otherwise.
    """
    new_json = json.dumps(
        {
            f"chromeos-kernel-{version.major}_{version.minor}": {
                "name": gs_name
            }
            for version, gs_name in contents.items()
        },
        indent=4,
        sort_keys=True,
    )

    try:
        old_contents = path.read_text(encoding="utf-8")
    except FileNotFoundError:
        old_contents = ""

    # Compare the _textual representation_ of each of these, since things like
    # formatting changes should be propagated eagerly.
    if new_json == old_contents:
        return False

    # Write to a sibling temp file, then rename into place.
    tmp_path = path.with_suffix(".json.tmp")
    tmp_path.write_text(new_json, encoding="utf-8")
    tmp_path.rename(path)
    return True
509
510
@dataclasses.dataclass
class UpdateResult:
    """Result of `update_afdo_for_channel`.

    Callers use `made_changes` to decide whether to commit/upload, and
    `had_failures` to decide the commit message and the script's exit status.
    """

    # True if changes were made to the AFDO files that map kernel versions to
    # AFDO profiles.
    made_changes: bool

    # Whether issues were had updating one or more profiles. If this is True,
    # you may expect that there will be logs about the issues already.
    had_failures: bool
522
523
def fetch_and_validate_newest_afdo_artifact(
    fetcher: KernelProfileFetcher,
    selection_info: ProfileSelectionInfo,
    arch: Arch,
    kernel_version: KernelVersion,
    branch: GitBranch,
    channel: Channel,
) -> Optional[Tuple[str, bool]]:
    """Tries to update one AFDO profile on a branch.

    Returns:
        None if something failed, and the update couldn't be completed.
        Otherwise, this returns a tuple of (profile_name, is_old). If `is_old`
        is True, this function logs an error.
    """
    artifact = find_newest_afdo_artifact(
        fetcher, arch, kernel_version, branch.release_number
    )
    if artifact is None and channel != Channel.STABLE:
        # Try an older branch if we're not on stable. We should fail harder if
        # we only have old profiles on stable, though.
        artifact = find_newest_afdo_artifact(
            fetcher, arch, kernel_version, branch.release_number - 1
        )

    if artifact is None:
        logging.error(
            "No new profile found for %s/%s on M%d; not updating entry",
            arch,
            kernel_version,
            branch.release_number,
        )
        return None

    logging.info(
        "Newest profile is %s for %s/%s on M%d",
        artifact.file_name,
        arch,
        kernel_version,
        branch.release_number,
    )
    profile_age = selection_info.now - artifact.gs_timestamp
    is_old = profile_age > selection_info.max_profile_age
    if is_old:
        logging.error(
            "Profile %s is %s old. The configured limit is %s.",
            artifact.file_name,
            profile_age,
            selection_info.max_profile_age,
        )
    return artifact.file_name_no_suffix, is_old
576
577
def update_afdo_for_channel(
    fetcher: KernelProfileFetcher,
    toolchain_utils: Path,
    selection_info: ProfileSelectionInfo,
    channel: Channel,
    branch: GitBranch,
    skipped_versions: Dict[int, Iterable[Tuple[Arch, KernelVersion]]],
) -> UpdateResult:
    """Updates AFDO on the given channel.

    Side effect: checks `branch` out in `toolchain_utils` and rewrites the
    per-arch metadata files there.

    Args:
        fetcher: Cache-backed fetcher of gs:// profile listings.
        toolchain_utils: Path to a toolchain-utils checkout to operate in.
        selection_info: Timestamp and max-age preferences for selection.
        channel: The channel being updated.
        branch: The git branch corresponding to `channel`.
        skipped_versions: Map of release number to (arch, kernel version)
            pairs that should not be rolled on that release.

    Returns:
        An UpdateResult saying whether any metadata file changed, and whether
        any per-profile failures occurred (already logged).
    """
    git_checkout(toolchain_utils, branch)
    update_cfgs = read_update_cfg_file(
        toolchain_utils,
        toolchain_utils / "afdo_tools" / "update_kernel_afdo.cfg",
    )

    to_skip = skipped_versions.get(branch.release_number)
    made_changes = False
    had_failures = False
    for arch, cfg in update_cfgs.items():
        # Start from the mappings already on this branch; only tracked
        # versions below are (re)assigned.
        afdo_mappings = read_afdo_descriptor_file(cfg.metadata_file)
        for kernel_version in cfg.versions_to_track:
            if to_skip and (arch, kernel_version) in to_skip:
                logging.info(
                    "%s/%s on M%d is in the skip list; ignoring it.",
                    arch,
                    kernel_version,
                    branch.release_number,
                )
                continue

            artifact_info = fetch_and_validate_newest_afdo_artifact(
                fetcher,
                selection_info,
                arch,
                kernel_version,
                branch,
                channel,
            )
            if artifact_info is None:
                # Assume that the problem was already logged.
                had_failures = True
                continue

            newest_name, is_old = artifact_info
            if is_old:
                # Assume that the problem was already logged, but continue to
                # land this in case it makes a difference.
                had_failures = True

            afdo_mappings[kernel_version] = newest_name

        # Only writes (and reports a change) when the serialized JSON differs.
        if write_afdo_descriptor_file(cfg.metadata_file, afdo_mappings):
            made_changes = True
            logging.info(
                "Wrote new AFDO mappings for arch %s on M%d",
                arch,
                branch.release_number,
            )
        else:
            logging.info(
                "No changes to write for arch %s on M%d",
                arch,
                branch.release_number,
            )
    return UpdateResult(
        made_changes=made_changes,
        had_failures=had_failures,
    )
646
647
def commit_new_profiles(
    toolchain_utils: Path, channel: Channel, had_failures: bool
):
    """Runs `git commit -a` with an appropriate message."""
    message_lines = [
        "afdo_metadata: Publish the new kernel profiles",
        "",
    ]

    # Be honest in the commit message if some profiles couldn't be updated.
    if had_failures:
        message_lines.extend(
            (
                "This brings some profiles to their newest versions. The CrOS",
                "toolchain detective has been notified about the failures that",
                "occurred in this update.",
            )
        )
    else:
        message_lines.append(
            "This brings all profiles to their newest versions."
        )

    if channel != Channel.CANARY:
        message_lines.extend(
            (
                "",
                "Have PM pre-approval because this shouldn't break the release",
                "branch.",
            )
        )

    message_lines.extend(
        (
            "",
            "BUG=None",
            "TEST=Verified in kernel-release-afdo-verify-orchestrator",
        )
    )

    subprocess.run(
        [
            "git",
            "commit",
            "--quiet",
            "-a",
            "-m",
            "\n".join(message_lines),
        ],
        cwd=toolchain_utils,
        check=True,
        stdin=subprocess.DEVNULL,
    )
695
696
def upload_head_to_gerrit(
    toolchain_utils: Path,
    chromeos_tree: Optional[Path],
    branch: GitBranch,
):
    """Uploads HEAD to gerrit as a CL, and sets reviewers/CCs."""
    cl_ids = git_utils.upload_to_gerrit(
        toolchain_utils,
        branch.remote,
        branch.branch_name,
        CL_REVIEWERS,
        CL_CC,
    )
    if len(cl_ids) > 1:
        raise ValueError(f"Unexpected: wanted just one CL upload; got {cl_ids}")

    cl_id = cl_ids[0]
    logging.info("Uploaded CL http://crrev.com/c/%s successfully.", cl_id)

    if chromeos_tree is None:
        logging.info(
            "Skipping gerrit convenience commands, since no CrOS tree was "
            "specified."
        )
        return
    git_utils.try_set_autosubmit_labels(chromeos_tree, cl_id)
725
726
def find_chromeos_tree_root(a_dir: Path) -> Optional[Path]:
    """Returns the nearest ancestor of `a_dir` that holds a `.repo` dir.

    Note that `a_dir` itself is not checked, only its parents.
    """
    return next(
        (
            parent
            for parent in a_dir.parents
            if (parent / ".repo").is_dir()
        ),
        None,
    )
732
733
def main(argv: List[str]) -> None:
    """Entry point: rolls kernel AFDO profiles on the requested channels.

    Exits nonzero (via sys.exit with a message) if any profile update failed.
    """
    # This script lives in toolchain-utils/afdo_tools/, so the repo root is
    # one level up.
    my_dir = Path(__file__).resolve().parent
    toolchain_utils = my_dir.parent

    opts = get_parser().parse_args(argv)
    logging.basicConfig(
        format=">> %(asctime)s: %(levelname)s: %(filename)s:%(lineno)d: "
        "%(message)s",
        level=logging.DEBUG if opts.debug else logging.INFO,
    )

    # Fall back to autodetection when --chromeos-tree isn't given.
    chromeos_tree = opts.chromeos_tree
    if not chromeos_tree:
        chromeos_tree = find_chromeos_tree_root(my_dir)
        if chromeos_tree:
            logging.info("Autodetected ChromeOS tree root at %s", chromeos_tree)

    if opts.fetch:
        logging.info("Fetching in %s...", toolchain_utils)
        git_fetch(toolchain_utils)

    # One consistent "now" for all age comparisons in this run.
    selection_info = ProfileSelectionInfo(
        now=datetime.datetime.now(datetime.timezone.utc),
        max_profile_age=datetime.timedelta(days=opts.max_age_days),
    )

    branches = autodetect_branches(toolchain_utils)
    logging.debug("Current branches: %s", branches)

    assert all(x in branches for x in Channel), "branches are missing channels?"

    fetcher = KernelProfileFetcher()
    had_failures = False
    # All branch checkouts and commits happen in a temporary worktree so the
    # user's checkout is left untouched.
    with git_utils.create_worktree(toolchain_utils) as worktree:
        for channel in opts.channel:
            branch = branches[channel]
            result = update_afdo_for_channel(
                fetcher,
                worktree,
                selection_info,
                channel,
                branch,
                SKIPPED_VERSIONS,
            )
            had_failures = had_failures or result.had_failures
            if not result.made_changes:
                logging.info("No new updates to post on %s", channel)
                continue

            commit_new_profiles(worktree, channel, result.had_failures)
            if opts.upload:
                logging.info("New profiles were committed. Uploading...")
                upload_head_to_gerrit(worktree, chromeos_tree, branch)
            else:
                logging.info(
                    "--upload not specified. Leaving commit for %s at %s",
                    channel,
                    git_rev_parse(worktree, "HEAD"),
                )

    if had_failures:
        sys.exit(
            "At least one failure was encountered running this script; see "
            "above logs. Most likely the things you're looking for are logged "
            "at the ERROR level."
        )
800
801
# Script entry point: forward CLI args (minus the program name) to main().
if __name__ == "__main__":
    main(sys.argv[1:])
804