1# Copyright 2020 The Pigweed Authors 2# 3# Licensed under the Apache License, Version 2.0 (the "License"); you may not 4# use this file except in compliance with the License. You may obtain a copy of 5# the License at 6# 7# https://www.apache.org/licenses/LICENSE-2.0 8# 9# Unless required by applicable law or agreed to in writing, software 10# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 11# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 12# License for the specific language governing permissions and limitations under 13# the License. 14 15import("//build_overrides/pigweed.gni") 16 17import("$dir_pw_arduino_build/arduino.gni") 18import("$dir_pw_bloat/bloat.gni") 19import("$dir_pw_build/cc_blob_library.gni") 20import("$dir_pw_build/module_config.gni") 21import("$dir_pw_build/target_types.gni") 22import("$dir_pw_docgen/docs.gni") 23import("$dir_pw_fuzzer/fuzzer.gni") 24import("$dir_pw_protobuf_compiler/proto.gni") 25import("$dir_pw_unit_test/test.gni") 26 27declare_args() { 28 # The build target that overrides the default configuration options for this 29 # module. This should point to a source set that provides defines through a 30 # public config (which may -include a file or add defines directly). 31 pw_tokenizer_CONFIG = pw_build_DEFAULT_MODULE_CONFIG 32} 33 34config("public_include_path") { 35 include_dirs = [ "public" ] 36 visibility = [ ":*" ] 37} 38 39config("linker_script") { 40 inputs = [ "pw_tokenizer_linker_sections.ld" ] 41 lib_dirs = [ "." ] 42 43 # Automatically add the tokenizer linker sections when cross-compiling or 44 # building for Linux. macOS and Windows executables are not supported. 45 if (current_os == "" || current_os == "freertos") { 46 ldflags = [ 47 "-T", 48 rebase_path("pw_tokenizer_linker_sections.ld", root_build_dir), 49 ] 50 } else if (current_os == "linux" && !pw_toolchain_OSS_FUZZ_ENABLED) { 51 # When building for Linux, the linker provides a default linker script. 52 # The add_tokenizer_sections_to_default_script.ld wrapper includes the 53 # pw_tokenizer_linker_sections.ld script in a way that appends to the the 54 # default linker script instead of overriding it. 55 ldflags = [ 56 "-T", 57 rebase_path("add_tokenizer_sections_to_default_script.ld", 58 root_build_dir), 59 ] 60 61 inputs += [ "add_tokenizer_sections_to_default_script.ld" ] 62 } 63 visibility = [ ":*" ] 64} 65 66pw_source_set("config") { 67 public = [ "public/pw_tokenizer/config.h" ] 68 public_configs = [ ":public_include_path" ] 69 public_deps = [ pw_tokenizer_CONFIG ] 70} 71 72pw_source_set("pw_tokenizer") { 73 public_configs = [ ":public_include_path" ] 74 all_dependent_configs = [ ":linker_script" ] 75 public_deps = [ 76 ":config", 77 "$dir_pw_containers:to_array", 78 dir_pw_polyfill, 79 dir_pw_preprocessor, 80 dir_pw_span, 81 dir_pw_varint, 82 ] 83 public = [ 84 "public/pw_tokenizer/encode_args.h", 85 "public/pw_tokenizer/enum.h", 86 "public/pw_tokenizer/hash.h", 87 "public/pw_tokenizer/nested_tokenization.h", 88 "public/pw_tokenizer/tokenize.h", 89 ] 90 sources = [ 91 "encode_args.cc", 92 "hash.cc", 93 "public/pw_tokenizer/internal/argument_types.h", 94 "public/pw_tokenizer/internal/argument_types_macro_4_byte.h", 95 "public/pw_tokenizer/internal/argument_types_macro_8_byte.h", 96 "public/pw_tokenizer/internal/enum.h", 97 "public/pw_tokenizer/internal/pw_tokenizer_65599_fixed_length_128_hash_macro.h", 98 "public/pw_tokenizer/internal/pw_tokenizer_65599_fixed_length_256_hash_macro.h", 99 "public/pw_tokenizer/internal/pw_tokenizer_65599_fixed_length_80_hash_macro.h", 100 "public/pw_tokenizer/internal/pw_tokenizer_65599_fixed_length_96_hash_macro.h", 101 "public/pw_tokenizer/internal/tokenize_string.h", 102 "tokenize.cc", 103 ] 104 friend = [ ":*" ] 105} 106 107pw_source_set("base64") { 108 public_configs = [ ":public_include_path" ] 109 public = [ "public/pw_tokenizer/base64.h" ] 110 sources = [ "base64.cc" ] 111 public_deps = [ 112 ":pw_tokenizer", 113 "$dir_pw_string:string", 114 dir_pw_base64, 115 dir_pw_preprocessor, 116 ] 117} 118 119pw_source_set("decoder") { 120 public_configs = [ ":public_include_path" ] 121 public_deps = [ 122 dir_pw_preprocessor, 123 dir_pw_result, 124 dir_pw_span, 125 dir_pw_stream, 126 ] 127 deps = [ 128 ":base64", 129 "$dir_pw_bytes:bit", 130 "$dir_pw_elf:reader", 131 dir_pw_base64, 132 dir_pw_bytes, 133 dir_pw_varint, 134 ] 135 public = [ 136 "public/pw_tokenizer/detokenize.h", 137 "public/pw_tokenizer/token_database.h", 138 ] 139 sources = [ 140 "decode.cc", 141 "detokenize.cc", 142 "public/pw_tokenizer/internal/decode.h", 143 "token_database.cc", 144 ] 145 friend = [ ":*" ] 146} 147 148# Executable for generating test data for the C++ and Python detokenizers. This 149# target should only be built for the host. 150pw_executable("generate_decoding_test_data") { 151 deps = [ 152 ":decoder", 153 ":pw_tokenizer", 154 dir_pw_varint, 155 ] 156 sources = [ "generate_decoding_test_data.cc" ] 157} 158 159# Executable for generating a test ELF file for elf_reader_test.py. A host 160# version of this binary is checked in for use in elf_reader_test.py. 161pw_executable("elf_reader_test_binary") { 162 deps = [ 163 ":pw_tokenizer", 164 "$dir_pw_varint", 165 ] 166 sources = [ "py/elf_reader_test_binary.c" ] 167 ldflags = [ "-Wl,--unresolved-symbols=ignore-all" ] # main is not defined 168} 169 170pw_test_group("tests") { 171 tests = [ 172 ":argument_types_test", 173 ":base64_test", 174 ":decode_test", 175 ":detokenize_test", 176 ":enum_test", 177 ":encode_args_test", 178 ":hash_test", 179 ":simple_tokenize_test", 180 ":token_database_test", 181 ":tokenize_test", 182 ":tokenize_c99_test", 183 ] 184 group_deps = [ 185 ":fuzzers", 186 "$dir_pw_preprocessor:tests", 187 ] 188} 189 190pw_fuzzer_group("fuzzers") { 191 fuzzers = [ 192 ":detokenize_fuzzer", 193 ":token_database_fuzzer", 194 ] 195} 196 197pw_test("argument_types_test") { 198 sources = [ 199 "argument_types_test.cc", 200 "argument_types_test_c.c", 201 "pw_tokenizer_private/argument_types_test.h", 202 ] 203 deps = [ ":pw_tokenizer" ] 204 205 if (pw_arduino_build_CORE_PATH != "") { 206 remove_configs = [ "$dir_pw_build:strict_warnings" ] 207 } 208} 209 210pw_test("base64_test") { 211 sources = [ "base64_test.cc" ] 212 deps = [ 213 ":base64", 214 dir_pw_span, 215 ] 216} 217 218pw_test("decode_test") { 219 sources = [ 220 "decode_test.cc", 221 "pw_tokenizer_private/tokenized_string_decoding_test_data.h", 222 "pw_tokenizer_private/varint_decoding_test_data.h", 223 ] 224 deps = [ 225 ":decoder", 226 "$dir_pw_varint", 227 ] 228 229 # TODO(tonymd): This fails on Teensyduino 1.54 beta core. It may be related to 230 # linking in stl functions. Will debug when 1.54 is released. 231 enable_if = pw_build_EXECUTABLE_TARGET_TYPE != "arduino_executable" 232} 233 234pw_test("detokenize_test") { 235 sources = [ "detokenize_test.cc" ] 236 deps = [ 237 ":decoder", 238 ":detokenizer_elf_test_blob", 239 dir_pw_stream, 240 ] 241 242 # TODO(tonymd): This fails on Teensyduino 1.54 beta core. It may be related to 243 # linking in stl functions. Will debug when 1.54 is released. 244 enable_if = pw_build_EXECUTABLE_TARGET_TYPE != "arduino_executable" 245} 246 247pw_test("encode_args_test") { 248 sources = [ "encode_args_test.cc" ] 249 deps = [ ":pw_tokenizer" ] 250} 251 252pw_test("enum_test") { 253 sources = [ "enum_test.cc" ] 254 deps = [ ":pw_tokenizer" ] 255 negative_compilation_tests = true 256} 257 258pw_test("hash_test") { 259 sources = [ 260 "hash_test.cc", 261 "pw_tokenizer_private/generated_hash_test_cases.h", 262 ] 263 deps = [ ":pw_tokenizer" ] 264} 265 266pw_test("simple_tokenize_test") { 267 sources = [ "simple_tokenize_test.cc" ] 268 deps = [ ":pw_tokenizer" ] 269} 270 271pw_test("token_database_test") { 272 sources = [ "token_database_test.cc" ] 273 deps = [ ":decoder" ] 274} 275 276pw_test("tokenize_test") { 277 sources = [ 278 "pw_tokenizer_private/tokenize_test.h", 279 "tokenize_test.cc", 280 "tokenize_test_c.c", 281 ] 282 deps = [ 283 ":pw_tokenizer", 284 "$dir_pw_varint", 285 ] 286} 287 288pw_test("tokenize_c99_test") { 289 cflags_c = [ 290 "-std=c99", 291 292 # pw_tokenizer uses static_assert, so this test uses a static_assert to 293 # verify that it works. Silence warnings about preadopting C11 features. 294 "-Wno-c11-extensions", 295 ] 296 sources = [ 297 "tokenize_c99_test.c", 298 "tokenize_c99_test_entry_point.cc", 299 ] 300 deps = [ 301 ":pw_tokenizer", 302 "$dir_pw_containers:inline_var_len_entry_queue", 303 ] 304} 305 306pw_fuzzer("token_database_fuzzer") { 307 sources = [ "token_database_fuzzer.cc" ] 308 deps = [ 309 ":decoder", 310 dir_pw_preprocessor, 311 dir_pw_span, 312 ] 313} 314 315pw_fuzzer("detokenize_fuzzer") { 316 sources = [ "detokenize_fuzzer.cc" ] 317 deps = [ 318 ":decoder", 319 dir_pw_preprocessor, 320 ] 321} 322 323pw_proto_library("proto") { 324 sources = [ "pw_tokenizer_proto/options.proto" ] 325 python_package = "py" 326} 327 328declare_args() { 329 # pw_JAVA_NATIVE_INTERFACE_INCLUDE_DIRS specifies the paths to use for 330 # building Java Native Interface libraries. If no paths are provided, targets 331 # that require JNI may not build correctly. 332 # 333 # Example JNI include paths for a Linux system: 334 # 335 # pw_JAVA_NATIVE_INTERFACE_INCLUDE_DIRS = [ 336 # "/usr/local/buildtools/java/jdk/include/", 337 # "/usr/local/buildtools/java/jdk/include/linux", 338 # ] 339 # 340 pw_JAVA_NATIVE_INTERFACE_INCLUDE_DIRS = [] 341} 342 343# Create a shared library for the tokenizer JNI wrapper. The include paths for 344# the JNI headers must be available in the system or provided with the 345# pw_JAVA_NATIVE_INTERFACE_INCLUDE_DIRS variable. 346pw_shared_library("detokenizer_jni") { 347 public_configs = [ ":public_include_path" ] 348 include_dirs = pw_JAVA_NATIVE_INTERFACE_INCLUDE_DIRS 349 sources = [ "java/dev/pigweed/tokenizer/detokenizer.cc" ] 350 public_deps = [ 351 ":decoder", 352 "$dir_pw_preprocessor", 353 ] 354 deps = [ dir_pw_span ] 355} 356 357pw_doc_group("docs") { 358 inputs = [ 359 "Kconfig", 360 "py/pw_tokenizer/encode.py", 361 ] 362 sources = [ 363 "api.rst", 364 "detokenization.rst", 365 "docs.rst", 366 "get_started.rst", 367 "token_databases.rst", 368 "tokenization.rst", 369 ] 370 report_deps = [ ":tokenizer_size_report" ] 371} 372 373# Pigweed tokenizer size report. 374pw_size_diff("tokenizer_size_report") { 375 title = "Pigweed tokenizer size report" 376 binaries = [ 377 { 378 target = "size_report:tokenize_string" 379 base = "size_report:tokenize_string_base" 380 label = "tokenize a string" 381 }, 382 { 383 target = "size_report:tokenize_string_expr" 384 base = "size_report:tokenize_string_expr_base" 385 label = "tokenize a string expression" 386 }, 387 ] 388} 389 390pw_cc_blob_library("detokenizer_elf_test_blob") { 391 out_header = "pw_tokenizer/example_binary_with_tokenized_strings.h" 392 namespace = "test::ns" 393 blobs = [ 394 { 395 file_path = "py/example_binary_with_tokenized_strings.elf" 396 symbol_name = "kElfSection" 397 }, 398 ] 399 visibility = [ ":*" ] 400} 401