1# Copyright 2023 The Pigweed Authors 2# 3# Licensed under the Apache License, Version 2.0 (the "License"); you may not 4# use this file except in compliance with the License. You may obtain a copy of 5# the License at 6# 7# https://www.apache.org/licenses/LICENSE-2.0 8# 9# Unless required by applicable law or agreed to in writing, software 10# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 11# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 12# License for the specific language governing permissions and limitations under 13# the License. 14 15load("@rules_python//python:proto.bzl", "py_proto_library") 16load("//pw_build:compatibility.bzl", "incompatible_with_mcu") 17load("//pw_build:pw_cc_binary.bzl", "pw_cc_binary") 18load("//pw_build:pw_cc_blob_library.bzl", "pw_cc_blob_info", "pw_cc_blob_library") 19load("//pw_build:pw_linker_script.bzl", "pw_linker_script") 20load("//pw_fuzzer:fuzzer.bzl", "pw_cc_fuzz_test") 21load("//pw_unit_test:pw_cc_test.bzl", "pw_cc_test") 22 23package(default_visibility = ["//visibility:public"]) 24 25licenses(["notice"]) 26 27cc_library( 28 name = "pw_tokenizer", 29 srcs = [ 30 "encode_args.cc", 31 "hash.cc", 32 "tokenize.cc", 33 ], 34 hdrs = [ 35 "public/pw_tokenizer/config.h", 36 "public/pw_tokenizer/encode_args.h", 37 "public/pw_tokenizer/enum.h", 38 "public/pw_tokenizer/hash.h", 39 "public/pw_tokenizer/internal/argument_types.h", 40 "public/pw_tokenizer/internal/argument_types_macro_4_byte.h", 41 "public/pw_tokenizer/internal/argument_types_macro_8_byte.h", 42 "public/pw_tokenizer/internal/enum.h", 43 "public/pw_tokenizer/internal/pw_tokenizer_65599_fixed_length_128_hash_macro.h", 44 "public/pw_tokenizer/internal/pw_tokenizer_65599_fixed_length_256_hash_macro.h", 45 "public/pw_tokenizer/internal/pw_tokenizer_65599_fixed_length_80_hash_macro.h", 46 "public/pw_tokenizer/internal/pw_tokenizer_65599_fixed_length_96_hash_macro.h", 47 "public/pw_tokenizer/internal/tokenize_string.h", 48 "public/pw_tokenizer/nested_tokenization.h", 49 "public/pw_tokenizer/tokenize.h", 50 ], 51 strip_include_prefix = "public", 52 deps = [ 53 ":config_override", 54 "//pw_bytes:bit", 55 "//pw_containers:to_array", 56 "//pw_polyfill", 57 "//pw_preprocessor", 58 "//pw_span", 59 "//pw_varint", 60 ], 61) 62 63label_flag( 64 name = "config_override", 65 build_setting_default = "//pw_build:default_module_config", 66) 67 68pw_linker_script( 69 name = "linker_script", 70 linker_script = "pw_tokenizer_linker_sections.ld", 71) 72 73cc_library( 74 name = "test_backend", 75 visibility = ["//targets:__pkg__"], 76) 77 78cc_library( 79 name = "base64", 80 srcs = [ 81 "base64.cc", 82 ], 83 hdrs = [ 84 "public/pw_tokenizer/base64.h", 85 ], 86 strip_include_prefix = "public", 87 deps = [ 88 ":pw_tokenizer", 89 "//pw_base64", 90 "//pw_preprocessor", 91 "//pw_span", 92 "//pw_string:string", 93 ], 94) 95 96cc_library( 97 name = "decoder", 98 srcs = [ 99 "decode.cc", 100 "detokenize.cc", 101 "token_database.cc", 102 ], 103 hdrs = [ 104 "public/pw_tokenizer/detokenize.h", 105 "public/pw_tokenizer/internal/decode.h", 106 "public/pw_tokenizer/token_database.h", 107 ], 108 implementation_deps = [ 109 "//pw_elf:reader", 110 ], 111 strip_include_prefix = "public", 112 deps = [ 113 ":base64", 114 "//pw_bytes", 115 "//pw_result", 116 "//pw_span", 117 "//pw_stream", 118 "//pw_varint", 119 ], 120) 121 122proto_library( 123 name = "tokenizer_proto", 124 srcs = [ 125 "pw_tokenizer_proto/options.proto", 126 ], 127 strip_import_prefix = "/pw_tokenizer", 128 deps = [ 129 "@com_google_protobuf//:descriptor_proto", 130 ], 131) 132 133py_proto_library( 134 name = "tokenizer_proto_py_pb2", 135 deps = [":tokenizer_proto"], 136) 137 138# Executable for generating test data for the C++ and Python detokenizers. This 139# target should only be built for the host. 140pw_cc_binary( 141 name = "generate_decoding_test_data", 142 srcs = [ 143 "generate_decoding_test_data.cc", 144 ], 145 target_compatible_with = select( 146 { 147 "@platforms//os:linux": [], 148 "@platforms//os:macos": [], 149 "@platforms//os:windows": [], 150 "//conditions:default": ["@platforms//:incompatible"], 151 }, 152 ), 153 deps = [ 154 ":decoder", 155 ":pw_tokenizer", 156 "//pw_preprocessor", 157 "//pw_span", 158 "//pw_varint", 159 ], 160) 161 162pw_cc_test( 163 name = "argument_types_test", 164 srcs = [ 165 "argument_types_test.cc", 166 "argument_types_test_c.c", 167 "pw_tokenizer_private/argument_types_test.h", 168 ], 169 deps = [ 170 ":pw_tokenizer", 171 "//pw_preprocessor", 172 "//pw_unit_test", 173 ], 174) 175 176pw_cc_test( 177 name = "base64_test", 178 srcs = [ 179 "base64_test.cc", 180 ], 181 deps = [ 182 ":base64", 183 "//pw_span", 184 "//pw_unit_test", 185 ], 186) 187 188pw_cc_test( 189 name = "decode_test", 190 srcs = [ 191 "decode_test.cc", 192 "pw_tokenizer_private/tokenized_string_decoding_test_data.h", 193 "pw_tokenizer_private/varint_decoding_test_data.h", 194 ], 195 # TODO: https://pwbug.dev/346628514 - Fix this for rp2040 196 target_compatible_with = select({ 197 "//pw_build/constraints/chipset:rp2040": ["@platforms//:incompatible"], 198 "//conditions:default": [], 199 }), 200 deps = [ 201 ":decoder", 202 "//pw_unit_test", 203 "//pw_varint", 204 ], 205) 206 207pw_cc_blob_info( 208 name = "detokenizer_example_elf_blob", 209 file_path = "//pw_tokenizer/py:example_binary_with_tokenized_strings", 210 symbol_name = "kElfSection", 211) 212 213pw_cc_blob_library( 214 name = "detokenizer_elf_test_blob", 215 blobs = [ 216 ":detokenizer_example_elf_blob", 217 ], 218 namespace = "test::ns", 219 out_header = "pw_tokenizer/example_binary_with_tokenized_strings.h", 220) 221 222pw_cc_test( 223 name = "detokenize_test", 224 srcs = [ 225 "detokenize_test.cc", 226 ], 227 deps = [ 228 ":decoder", 229 ":detokenizer_elf_test_blob", 230 "//pw_stream", 231 "//pw_unit_test", 232 ], 233) 234 235pw_cc_fuzz_test( 236 name = "detokenize_fuzzer", 237 srcs = ["detokenize_fuzzer.cc"], 238 deps = [ 239 ":decoder", 240 ":pw_tokenizer", 241 ], 242) 243 244pw_cc_test( 245 name = "encode_args_test", 246 srcs = ["encode_args_test.cc"], 247 deps = [ 248 ":pw_tokenizer", 249 "//pw_unit_test", 250 ], 251) 252 253pw_cc_test( 254 name = "hash_test", 255 srcs = [ 256 "hash_test.cc", 257 "pw_tokenizer_private/generated_hash_test_cases.h", 258 ], 259 deps = [ 260 ":pw_tokenizer", 261 "//pw_preprocessor", 262 "//pw_unit_test", 263 ], 264) 265 266pw_cc_test( 267 name = "simple_tokenize_test", 268 srcs = [ 269 "simple_tokenize_test.cc", 270 ], 271 deps = [ 272 ":pw_tokenizer", 273 "//pw_unit_test", 274 ], 275) 276 277pw_cc_test( 278 name = "token_database_test", 279 srcs = [ 280 "token_database_test.cc", 281 ], 282 deps = [ 283 ":decoder", 284 "//pw_unit_test", 285 ], 286) 287 288pw_cc_test( 289 name = "tokenize_test", 290 srcs = [ 291 "pw_tokenizer_private/tokenize_test.h", 292 "tokenize_test.cc", 293 "tokenize_test_c.c", 294 ], 295 # TODO: b/344050496 - get working on rp2040 and stm32f429i 296 target_compatible_with = incompatible_with_mcu(), 297 deps = [ 298 ":pw_tokenizer", 299 "//pw_preprocessor", 300 "//pw_unit_test", 301 "//pw_varint", 302 ], 303) 304 305pw_cc_test( 306 name = "tokenize_c99_test", 307 srcs = ["tokenize_c99_test_entry_point.cc"], 308 deps = [ 309 ":tokenize_c99_test_c", 310 "//pw_unit_test", 311 ], 312) 313 314pw_cc_test( 315 name = "enum_test", 316 srcs = [ 317 "enum_test.cc", 318 ], 319 deps = [ 320 "//pw_compilation_testing:negative_compilation_testing", 321 "//pw_tokenizer", 322 "//pw_unit_test", 323 ], 324) 325 326cc_library( 327 name = "tokenize_c99_test_c", 328 srcs = ["tokenize_c99_test.c"], 329 copts = [ 330 "-std=c99", 331 # pw_tokenizer uses static_assert, so this test uses a static_assert to 332 # verify that it works. Silence warnings about preadopting C11 features. 333 "-Wno-c11-extensions", 334 ], 335 visibility = ["//visibility:private"], 336 deps = [ 337 ":pw_tokenizer", 338 "//pw_containers:inline_var_len_entry_queue", 339 ], 340) 341 342# Create a shared library for the tokenizer JNI wrapper. The include paths for 343# the JNI headers must be available in the system or provided with the 344# pw_java_native_interface_include_dirs variable. 345filegroup( 346 name = "detokenizer_jni", 347 srcs = [ 348 "java/dev/pigweed/tokenizer/detokenizer.cc", 349 ], 350) 351 352filegroup( 353 name = "doxygen", 354 srcs = [ 355 "public/pw_tokenizer/config.h", 356 "public/pw_tokenizer/detokenize.h", 357 "public/pw_tokenizer/encode_args.h", 358 "public/pw_tokenizer/enum.h", 359 "public/pw_tokenizer/nested_tokenization.h", 360 "public/pw_tokenizer/token_database.h", 361 "public/pw_tokenizer/tokenize.h", 362 ], 363) 364