# Copyright 2022 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common functions that are specific to Bazel rule implementation"""

load("@bazel_skylib//lib:paths.bzl", "paths")
load("@rules_cc//cc:defs.bzl", "CcInfo", "cc_common")
load("//python/private:py_interpreter_program.bzl", "PyInterpreterProgramInfo")
load("//python/private:toolchain_types.bzl", "EXEC_TOOLS_TOOLCHAIN_TYPE", "TARGET_TOOLCHAIN_TYPE")
load(":attributes.bzl", "PrecompileAttr", "PrecompileInvalidationModeAttr", "PrecompileSourceRetentionAttr")
load(":common.bzl", "is_bool")
load(":providers.bzl", "PyCcLinkParamsProvider")
load(":py_internal.bzl", "py_internal")

_py_builtins = py_internal

def collect_cc_info(ctx, extra_deps = []):
    """Collect C++ information from dependencies for Bazel.

    Args:
        ctx: Rule ctx; must have `deps` attribute.
        extra_deps: list of Target to also collect C++ information from.

    Returns:
        CcInfo provider of merged information.
    """
    deps = ctx.attr.deps
    if extra_deps:
        deps = list(deps)
        deps.extend(extra_deps)
    cc_infos = []
    for dep in deps:
        if CcInfo in dep:
            cc_infos.append(dep[CcInfo])

        if PyCcLinkParamsProvider in dep:
            cc_infos.append(dep[PyCcLinkParamsProvider].cc_info)

    return cc_common.merge_cc_infos(cc_infos = cc_infos)

def maybe_precompile(ctx, srcs):
    """Computes all the outputs (maybe precompiled) from the input srcs.

    See create_binary_semantics_struct for details about this function.

    Args:
        ctx: Rule ctx.
        srcs: List of Files; the inputs to maybe precompile.

    Returns:
        Struct of precompiling results with fields:
        * `keep_srcs`: list of File; the input sources that should be included
          as default outputs and runfiles.
        * `pyc_files`: list of File; the precompiled files.
        * `py_to_pyc_map`: dict of src File input to pyc File output. If a source
          file wasn't precompiled, it won't be in the dict.
    """

    # The exec tools toolchain and precompiler are optional. Rather than
    # fail, just skip precompiling, as it's mostly just an optimization.
    exec_tools_toolchain = ctx.toolchains[EXEC_TOOLS_TOOLCHAIN_TYPE]
    if exec_tools_toolchain == None or exec_tools_toolchain.exec_tools.precompiler == None:
        precompile = PrecompileAttr.DISABLED
    else:
        precompile = PrecompileAttr.get_effective_value(ctx)

    source_retention = PrecompileSourceRetentionAttr.get_effective_value(ctx)

    result = struct(
        keep_srcs = [],
        pyc_files = [],
        py_to_pyc_map = {},
    )
    for src in srcs:
        # The logic below is a bit convoluted. The gist is:
        # * If precompiling isn't done, add the py source to default outputs.
        #   Otherwise, the source retention flag decides.
        # * In order to determine `use_pycache`, we have to know if the source
        #   is being added to the default outputs.
        is_generated_source = not src.is_source
        should_precompile = (
            precompile == PrecompileAttr.ENABLED or
            (precompile == PrecompileAttr.IF_GENERATED_SOURCE and is_generated_source)
        )
        keep_source = (
            not should_precompile or
            source_retention == PrecompileSourceRetentionAttr.KEEP_SOURCE or
            (source_retention == PrecompileSourceRetentionAttr.OMIT_IF_GENERATED_SOURCE and not is_generated_source)
        )
        if should_precompile:
            pyc = _precompile(ctx, src, use_pycache = keep_source)
            result.pyc_files.append(pyc)
            result.py_to_pyc_map[src] = pyc
        if keep_source:
            result.keep_srcs.append(src)

    return result

def _precompile(ctx, src, *, use_pycache):
    """Compile a py file to pyc.

    Args:
        ctx: rule context.
        src: File object to compile
        use_pycache: bool. True if the output should be within the `__pycache__`
            sub-directory. False if it should be alongside the original source
            file.

    Returns:
        File of the generated pyc file.
    """
    exec_tools_info = ctx.toolchains[EXEC_TOOLS_TOOLCHAIN_TYPE].exec_tools
    target_toolchain = ctx.toolchains[TARGET_TOOLCHAIN_TYPE].py3_runtime

    # These args control starting the precompiler, e.g., when run as a worker,
    # these args are only passed once.
    precompiler_startup_args = ctx.actions.args()

    env = {}
    tools = []

    precompiler = exec_tools_info.precompiler
    if PyInterpreterProgramInfo in precompiler:
        precompiler_executable = exec_tools_info.exec_interpreter[DefaultInfo].files_to_run
        program_info = precompiler[PyInterpreterProgramInfo]
        env.update(program_info.env)
        precompiler_startup_args.add_all(program_info.interpreter_args)
        default_info = precompiler[DefaultInfo]
        precompiler_startup_args.add(default_info.files_to_run.executable)
        tools.append(default_info.files_to_run)
    elif precompiler[DefaultInfo].files_to_run:
        precompiler_executable = precompiler[DefaultInfo].files_to_run
    else:
        fail(("Unrecognized precompiler: target '{}' does not provide " +
              "PyInterpreterProgramInfo nor appears to be executable").format(
            precompiler,
        ))

    stem = src.basename[:-(len(src.extension) + 1)]
    if use_pycache:
        if not target_toolchain.pyc_tag:
            fail("Unable to create __pycache__ pyc: pyc_tag is empty")
        pyc_path = "__pycache__/{stem}.{tag}.pyc".format(
            stem = stem,
            tag = target_toolchain.pyc_tag,
        )
    else:
        pyc_path = "{}.pyc".format(stem)

    pyc = ctx.actions.declare_file(pyc_path, sibling = src)

    invalidation_mode = ctx.attr.precompile_invalidation_mode
    if invalidation_mode == PrecompileInvalidationModeAttr.AUTO:
        if ctx.var["COMPILATION_MODE"] == "opt":
            invalidation_mode = PrecompileInvalidationModeAttr.UNCHECKED_HASH
        else:
            invalidation_mode = PrecompileInvalidationModeAttr.CHECKED_HASH

    # Though --modify_execution_info exists, it can only set keys with
    # empty values, which doesn't work for persistent worker settings.
    execution_requirements = {}
    if testing.ExecutionInfo in precompiler:
        execution_requirements.update(precompiler[testing.ExecutionInfo].requirements)

    # These args are passed for every precompilation request, e.g. as part of
    # a request to a worker process.
    precompile_request_args = ctx.actions.args()

    # Always use param files so that it can be run as a persistent worker
    precompile_request_args.use_param_file("@%s", use_always = True)
    precompile_request_args.set_param_file_format("multiline")

    precompile_request_args.add("--invalidation_mode", invalidation_mode)
    precompile_request_args.add("--src", src)

    # NOTE: src.short_path is used because src.path contains the platform and
    # build-specific hash portions of the path, which we don't want in the
    # pyc data. Note, however, for remote-remote files, short_path will
    # have the repo name, which is likely to contain extraneous info.
    precompile_request_args.add("--src_name", src.short_path)
    precompile_request_args.add("--pyc", pyc)
    precompile_request_args.add("--optimize", ctx.attr.precompile_optimize_level)

    version_info = target_toolchain.interpreter_version_info
    python_version = "{}.{}".format(version_info.major, version_info.minor)
    precompile_request_args.add("--python_version", python_version)

    ctx.actions.run(
        executable = precompiler_executable,
        arguments = [precompiler_startup_args, precompile_request_args],
        inputs = [src],
        outputs = [pyc],
        mnemonic = "PyCompile",
        progress_message = "Python precompiling %{input} into %{output}",
        tools = tools,
        env = env | {
            "PYTHONHASHSEED": "0",  # Helps avoid non-deterministic behavior
            "PYTHONNOUSERSITE": "1",  # Helps avoid non-deterministic behavior
            "PYTHONSAFEPATH": "1",  # Helps avoid incorrect import issues
        },
        execution_requirements = execution_requirements,
        toolchain = EXEC_TOOLS_TOOLCHAIN_TYPE,
    )
    return pyc

def get_imports(ctx):
    """Gets the imports from a rule's `imports` attribute.

    See create_binary_semantics_struct for details about this function.

    Args:
        ctx: Rule ctx.

    Returns:
        List of strings.
    """
    prefix = "{}/{}".format(
        ctx.workspace_name,
        _py_builtins.get_label_repo_runfiles_path(ctx.label),
    )
    result = []
    for import_str in ctx.attr.imports:
        import_str = ctx.expand_make_variables("imports", import_str, {})
        if import_str.startswith("/"):
            continue

        # To prevent "escaping" out of the runfiles tree, we normalize
        # the path and ensure it doesn't have up-level references.
        import_path = paths.normalize("{}/{}".format(prefix, import_str))
        if import_path.startswith("../") or import_path == "..":
            fail("Path '{}' references a path above the execution root".format(
                import_str,
            ))
        result.append(import_path)
    return result

def convert_legacy_create_init_to_int(kwargs):
    """Convert "legacy_create_init" key to int, in-place.

    Args:
        kwargs: The kwargs to modify. The key "legacy_create_init", if present
            and bool, will be converted to its integer value, in place.
    """
    if is_bool(kwargs.get("legacy_create_init")):
        kwargs["legacy_create_init"] = 1 if kwargs["legacy_create_init"] else 0