# Copyright 2023 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

load("@rules_python//python:proto.bzl", "py_proto_library")
load(
    "//pw_build:pigweed.bzl",
    "pw_cc_binary",
    "pw_cc_blob_info",
    "pw_cc_blob_library",
    "pw_cc_test",
    "pw_linker_script",
)
load("//pw_build:selects.bzl", "TARGET_COMPATIBLE_WITH_HOST_SELECT")
load("//pw_fuzzer:fuzzer.bzl", "pw_cc_fuzz_test")

package(default_visibility = ["//visibility:public"])

licenses(["notice"])

cc_library(
    name = "pw_tokenizer",
    srcs = [
        "encode_args.cc",
        "hash.cc",
        "public/pw_tokenizer/config.h",
        "public/pw_tokenizer/internal/argument_types.h",
        "public/pw_tokenizer/internal/argument_types_macro_4_byte.h",
        "public/pw_tokenizer/internal/argument_types_macro_8_byte.h",
        "public/pw_tokenizer/internal/pw_tokenizer_65599_fixed_length_128_hash_macro.h",
        "public/pw_tokenizer/internal/pw_tokenizer_65599_fixed_length_256_hash_macro.h",
        "public/pw_tokenizer/internal/pw_tokenizer_65599_fixed_length_80_hash_macro.h",
        "public/pw_tokenizer/internal/pw_tokenizer_65599_fixed_length_96_hash_macro.h",
        "public/pw_tokenizer/internal/tokenize_string.h",
        "tokenize.cc",
    ],
    hdrs = [
        "public/pw_tokenizer/encode_args.h",
        "public/pw_tokenizer/hash.h",
        "public/pw_tokenizer/nested_tokenization.h",
        "public/pw_tokenizer/tokenize.h",
    ],
    includes = ["public"],
    deps = [
        ":config_override",
        "//pw_bytes:bit",
        "//pw_containers:to_array",
        "//pw_polyfill",
        "//pw_preprocessor",
        "//pw_span",
        "//pw_varint",
    ],
)

label_flag(
    name = "config_override",
    build_setting_default = "//pw_build:default_module_config",
)

pw_linker_script(
    name = "linker_script",
    linker_script = "pw_tokenizer_linker_sections.ld",
)

cc_library(
    name = "test_backend",
    visibility = ["@pigweed//targets:__pkg__"],
)

cc_library(
    name = "base64",
    srcs = [
        "base64.cc",
    ],
    hdrs = [
        "public/pw_tokenizer/base64.h",
    ],
    includes = ["public"],
    deps = [
        ":pw_tokenizer",
        "//pw_base64",
        "//pw_preprocessor",
        "//pw_span",
        "//pw_string:string",
    ],
)

cc_library(
    name = "decoder",
    srcs = [
        "decode.cc",
        "detokenize.cc",
        "token_database.cc",
    ],
    hdrs = [
        "public/pw_tokenizer/detokenize.h",
        "public/pw_tokenizer/internal/decode.h",
        "public/pw_tokenizer/token_database.h",
    ],
    includes = ["public"],
    deps = [
        ":base64",
        "//pw_bytes",
        "//pw_result",
        "//pw_span",
        "//pw_varint",
    ],
)

proto_library(
    name = "tokenizer_proto",
    srcs = [
        "pw_tokenizer_proto/options.proto",
    ],
    strip_import_prefix = "/pw_tokenizer",
    deps = [
        "@com_google_protobuf//:descriptor_proto",
    ],
)

py_proto_library(
    name = "tokenizer_proto_py_pb2",
    deps = [":tokenizer_proto"],
)
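
# Downstream projects can swap out this module's configuration through the
# `config_override` label flag declared above, following Pigweed's standard
# module configuration pattern. A minimal sketch: the `//my_project` package,
# the `tokenizer_config` target name, and the chosen
# PW_TOKENIZER_CFG_ENCODING_BUFFER_SIZE_BYTES value are illustrative
# assumptions, not part of this build.
#
#     # In //my_project/BUILD.bazel: a library carrying the config defines
#     # consumed by public/pw_tokenizer/config.h.
#     cc_library(
#         name = "tokenizer_config",
#         defines = ["PW_TOKENIZER_CFG_ENCODING_BUFFER_SIZE_BYTES=128"],
#     )
#
# The flag is then pointed at that library on the command line or in .bazelrc:
#
#     bazel build //... --//pw_tokenizer:config_override=//my_project:tokenizer_config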
# Executable for generating test data for the C++ and Python detokenizers. This
# target should only be built for the host.
pw_cc_binary(
    name = "generate_decoding_test_data",
    srcs = [
        "generate_decoding_test_data.cc",
    ],
    target_compatible_with = select({
        "@platforms//os:linux": [],
        "@platforms//os:macos": [],
        "@platforms//os:windows": [],
        "//conditions:default": ["@platforms//:incompatible"],
    }),
    deps = [
        ":decoder",
        ":pw_tokenizer",
        "//pw_preprocessor",
        "//pw_span",
        "//pw_varint",
    ],
)

pw_cc_test(
    name = "argument_types_test",
    srcs = [
        "argument_types_test.cc",
        "argument_types_test_c.c",
        "pw_tokenizer_private/argument_types_test.h",
    ],
    deps = [
        ":pw_tokenizer",
        "//pw_preprocessor",
        "//pw_unit_test",
    ],
)

pw_cc_test(
    name = "base64_test",
    srcs = [
        "base64_test.cc",
    ],
    deps = [
        ":base64",
        "//pw_span",
        "//pw_unit_test",
    ],
)

pw_cc_test(
    name = "decode_test",
    srcs = [
        "decode_test.cc",
        "pw_tokenizer_private/tokenized_string_decoding_test_data.h",
        "pw_tokenizer_private/varint_decoding_test_data.h",
    ],
    deps = [
        ":decoder",
        "//pw_unit_test",
        "//pw_varint",
    ],
)

pw_cc_blob_info(
    name = "detokenizer_example_elf_blob",
    file_path = "//pw_tokenizer/py:example_binary_with_tokenized_strings",
    symbol_name = "kElfSection",
)

pw_cc_blob_library(
    name = "detokenizer_elf_test_blob",
    blobs = [
        ":detokenizer_example_elf_blob",
    ],
    namespace = "test::ns",
    out_header = "pw_tokenizer/example_binary_with_tokenized_strings.h",
)

pw_cc_test(
    name = "detokenize_test",
    srcs = [
        "detokenize_test.cc",
    ],
    deps = [
        ":decoder",
        ":detokenizer_elf_test_blob",
        "//pw_unit_test",
    ],
)

pw_cc_fuzz_test(
    name = "detokenize_fuzzer",
    srcs = ["detokenize_fuzzer.cc"],
    deps = [
        ":decoder",
        ":pw_tokenizer",
    ],
)

pw_cc_test(
    name = "encode_args_test",
    srcs = ["encode_args_test.cc"],
    deps = [
        ":pw_tokenizer",
        "//pw_unit_test",
    ],
)

pw_cc_test(
    name = "hash_test",
    srcs = [
        "hash_test.cc",
        "pw_tokenizer_private/generated_hash_test_cases.h",
    ],
    deps = [
        ":pw_tokenizer",
        "//pw_preprocessor",
        "//pw_unit_test",
    ],
)

pw_cc_test(
    name = "simple_tokenize_test",
    srcs = [
        "simple_tokenize_test.cc",
    ],
    deps = [
        ":pw_tokenizer",
        "//pw_unit_test",
    ],
)

pw_cc_test(
    name = "token_database_test",
    srcs = [
        "token_database_test.cc",
    ],
    deps = [
        ":decoder",
        "//pw_unit_test",
    ],
)

pw_cc_test(
    name = "tokenize_test",
    srcs = [
        "pw_tokenizer_private/tokenize_test.h",
        "tokenize_test.cc",
        "tokenize_test_c.c",
    ],
    # TODO: b/344050496 - Get this test working on rp2040 and stm32f429i.
    target_compatible_with = select(TARGET_COMPATIBLE_WITH_HOST_SELECT),
    deps = [
        ":pw_tokenizer",
        "//pw_preprocessor",
        "//pw_unit_test",
        "//pw_varint",
    ],
)

pw_cc_test(
    name = "tokenize_c99_test",
    srcs = ["tokenize_c99_test_entry_point.cc"],
    deps = [
        ":tokenize_c99_test_c",
        "//pw_unit_test",
    ],
)

cc_library(
    name = "tokenize_c99_test_c",
    srcs = ["tokenize_c99_test.c"],
    copts = [
        "-std=c99",
        # pw_tokenizer uses static_assert, so this test uses a static_assert
        # to verify that it works. Silence warnings about static_assert being
        # a C11 extension.
        "-Wno-c11-extensions",
    ],
    visibility = ["//visibility:private"],
    deps = [
        ":pw_tokenizer",
        "//pw_containers:inline_var_len_entry_queue",
    ],
)

# Create a shared library for the tokenizer JNI wrapper. The include paths for
# the JNI headers must be available on the system or provided with the
# pw_java_native_interface_include_dirs variable.
filegroup(
    name = "detokenizer_jni",
    srcs = [
        "java/dev/pigweed/tokenizer/detokenizer.cc",
    ],
)
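
# A minimal sketch of how the JNI wrapper sources above could be linked into a
# shared library using a plain cc_binary rule with `linkshared`. The
# `libdetokenizer.so` name and the dependency list are illustrative
# assumptions, not targets defined by this build.
#
#     cc_binary(
#         name = "libdetokenizer.so",  # Hypothetical output name.
#         srcs = [":detokenizer_jni"],
#         linkshared = True,  # Build a shared library, not an executable.
#         # JNI header include paths must be supplied (e.g. via copts), per
#         # the note above.
#         deps = [":decoder"],
#     )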