xref: /aosp_15_r20/external/grpc-grpc/tools/run_tests/run_microbenchmark.py (revision cc02d7e222339f7a4f6ba5f422e6413f4bd931f2)
1#!/usr/bin/env python3
2# Copyright 2017 gRPC authors.
3#
4# Licensed under the Apache License, Version 2.0 (the "License");
5# you may not use this file except in compliance with the License.
6# You may obtain a copy of the License at
7#
8#     http://www.apache.org/licenses/LICENSE-2.0
9#
10# Unless required by applicable law or agreed to in writing, software
11# distributed under the License is distributed on an "AS IS" BASIS,
12# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13# See the License for the specific language governing permissions and
14# limitations under the License.
15
16import argparse
17import html
18import multiprocessing
19import os
20import subprocess
21import sys
22
23import python_utils.jobset as jobset
24import python_utils.start_port_server as start_port_server
25
# Make the bm_diff helpers importable: bm_constants (imported just below)
# lives in tools/profiling/microbenchmarks/bm_diff relative to this script,
# so that directory must be on sys.path before the import runs.
sys.path.append(
    os.path.join(
        os.path.dirname(sys.argv[0]),
        "..",
        "profiling",
        "microbenchmarks",
        "bm_diff",
    )
)
import bm_constants
36
# Location of a FlameGraph checkout in the user's home directory (kept for
# profile-based collectors; unused by the summary collector below).
flamegraph_dir = os.path.join(os.path.expanduser("~"), "FlameGraph")

# Run from the repository root so relative paths ("tools/bazel", "reports/")
# resolve regardless of where the script was invoked from.
os.chdir(os.path.join(os.path.dirname(sys.argv[0]), "../.."))
# exist_ok avoids the check-then-create race of the former
# `if not os.path.exists(...): os.makedirs(...)` pattern.
os.makedirs("reports", exist_ok=True)

start_port_server.start_port_server()
44
45
def fnize(s):
    """Turn a benchmark name into a filename-safe token.

    Characters in ``<>, /`` become underscores, and consecutive
    separators (including a pre-existing trailing ``_``) collapse
    into a single underscore.
    """
    pieces = []
    for ch in s:
        if ch not in "<>, /":
            pieces.append(ch)
        elif not (pieces and pieces[-1] == "_"):
            pieces.append("_")
    return "".join(pieces)
56
57
# Accumulated HTML for reports/index.html.  The heading()/link()/text()
# helpers below append to this string; the closing tags are added and the
# file is written in the try/finally at the bottom of the script.
index_html = """
<html>
<head>
<title>Microbenchmark Results</title>
</head>
<body>
"""
66
67
def heading(name):
    """Append an <h1> section header for *name* to the report index."""
    global index_html
    index_html = index_html + f"<h1>{name}</h1>\n"
71
72
def link(txt, tgt):
    """Append a hyperlink paragraph to the report index.

    The target URL (attribute-quoted) and the link text are both
    HTML-escaped before insertion.
    """
    global index_html
    escaped_tgt = html.escape(tgt, quote=True)
    escaped_txt = html.escape(txt)
    index_html = index_html + f'<p><a href="{escaped_tgt}">{escaped_txt}</a></p>\n'
79
80
def text(txt):
    """Append *txt* to the report index as an HTML-escaped <pre> block."""
    global index_html
    body = html.escape(txt)
    index_html += f"<p><pre>{body}</pre></p>\n"
84
85
def _bazel_build_benchmark(bm_name, cfg):
    """Build the //test/cpp/microbenchmarks target *bm_name* with bazel.

    Raises subprocess.CalledProcessError if the build fails.
    """
    target = "//test/cpp/microbenchmarks:%s" % bm_name
    config = "--config=%s" % cfg
    subprocess.check_call(["tools/bazel", "build", config, target])
96
97
def run_summary(bm_name, cfg, base_json_name):
    """Build *bm_name* under config *cfg*, run it, and return its stdout.

    JSON results are also written to <base_json_name>.<cfg>.json.  Honors
    the global --summary_time flag (minimum benchmark time) when set.
    """
    _bazel_build_benchmark(bm_name, cfg)
    binary = "bazel-bin/test/cpp/microbenchmarks/%s" % bm_name
    out_json = "%s.%s.json" % (base_json_name, cfg)
    cmd = [binary, "--benchmark_out=%s" % out_json, "--benchmark_out_format=json"]
    if args.summary_time is not None:
        cmd.append("--benchmark_min_time=%d" % args.summary_time)
    return subprocess.check_output(cmd).decode("UTF-8")
108
109
def collect_summary(bm_name, args):
    """Run *bm_name* once (opt config) and record its summary output.

    No perf counters are collected; the benchmark's own summary is added
    both to the HTML report and echoed to the console.
    """
    title = "Summary: %s" % bm_name
    summary = run_summary(bm_name, "opt", bm_name)
    heading(title)
    text(summary)
    print(title)
    print(summary)
119
120
# Maps each --collect choice to the function that performs that collection.
collectors = {
    "summary": collect_summary,
}
124
# Command-line interface: choose collectors and benchmarks to run.
argp = argparse.ArgumentParser(description="Collect data from microbenchmarks")
argp.add_argument(
    "-c",
    "--collect",
    choices=sorted(collectors.keys()),
    nargs="*",
    default=sorted(collectors.keys()),  # run every collector by default
    help="Which collectors should be run against each benchmark",
)
argp.add_argument(
    "-b",
    "--benchmarks",
    choices=bm_constants._AVAILABLE_BENCHMARK_TESTS,
    default=bm_constants._AVAILABLE_BENCHMARK_TESTS,  # all benchmarks by default
    nargs="+",
    type=str,
    help="Which microbenchmarks should be run",
)
# NOTE(review): --bq_result_table is not read by the summary collector in
# this file — presumably consumed elsewhere; confirm before removing.
argp.add_argument(
    "--bq_result_table",
    default="",
    type=str,
    help=(
        "Upload results from summary collection to a specified bigquery table."
    ),
)
argp.add_argument(
    "--summary_time",
    default=None,  # None means: do not pass --benchmark_min_time to the binary
    type=int,
    help="Minimum time to run benchmarks for the summary collection",
)
args = argp.parse_args()
158
# Run every requested collector against every requested benchmark.  The
# finally block guarantees the HTML index is written out (with closing
# tags) even if a benchmark run fails partway through.
try:
    for collect in args.collect:
        for bm_name in args.benchmarks:
            collectors[collect](bm_name, args)
finally:
    if not os.path.exists("reports"):
        os.makedirs("reports")
    index_html += "</body>\n</html>\n"
    with open("reports/index.html", "w") as f:
        f.write(index_html)
169