pw_tokenizer: Update C naming style
- Update C symbols to the module_name_SymbolName style.
- Rename and move the token alias.
- Update pw_metric/metric.h to refer to pw::tokenizer::Token instead of
uint32_t.
Change-Id: I75c5df8a690b9c29472a8e64c6d82aa4fefca6aa
Reviewed-on: https://pigweed-review.googlesource.com/c/pigweed/pigweed/+/17160
Commit-Queue: Wyatt Hepler <hepler@google.com>
Reviewed-by: Keir Mierle <keir@google.com>
diff --git a/pw_log_tokenized/docs.rst b/pw_log_tokenized/docs.rst
index 4632af6..1225006 100644
--- a/pw_log_tokenized/docs.rst
+++ b/pw_log_tokenized/docs.rst
@@ -9,7 +9,7 @@
----------------
``pw_log_tokenized`` is a ``pw_log`` backend that tokenizes log messages using
the ``pw_tokenizer`` module. Log messages are tokenized and passed to the
-``pw_TokenizerHandleEncodedMessageWithPayload`` function. For maximum
+``pw_tokenizer_HandleEncodedMessageWithPayload`` function. For maximum
efficiency, the log level, 16-bit tokenized module name, and flags bits are
passed through the payload argument.
@@ -17,8 +17,8 @@
.. code-block:: cpp
- extern "C" void pw_TokenizerHandleEncodedMessageWithPayload(
- pw_TokenizerPayload payload, const uint8_t message[], size_t size) {
+ extern "C" void pw_tokenizer_HandleEncodedMessageWithPayload(
+ pw_tokenizer_Payload payload, const uint8_t message[], size_t size) {
// The metadata object provides the log level, module token, and flags.
// These values can be recorded and used for runtime filtering.
pw::log_tokenized::Metadata metadata(payload);
diff --git a/pw_log_tokenized/public/pw_log_tokenized/log_tokenized.h b/pw_log_tokenized/public/pw_log_tokenized/log_tokenized.h
index 59d48d0..1589e88 100644
--- a/pw_log_tokenized/public/pw_log_tokenized/log_tokenized.h
+++ b/pw_log_tokenized/public/pw_log_tokenized/log_tokenized.h
@@ -24,12 +24,12 @@
// PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD. The log level, module token, and
// flags are packed into the payload argument.
//
-// To use this macro, implement pw_TokenizerHandleEncodedMessageWithPayload,
+// To use this macro, implement pw_tokenizer_HandleEncodedMessageWithPayload,
// which is defined in pw_tokenizer/tokenize.h. The log metadata can be accessed
// using pw::log_tokenized::Metadata. For example:
//
-// extern "C" void pw_TokenizerHandleEncodedMessageWithPayload(
-// pw_TokenizerPayload payload, const uint8_t data[], size_t size) {
+// extern "C" void pw_tokenizer_HandleEncodedMessageWithPayload(
+// pw_tokenizer_Payload payload, const uint8_t data[], size_t size) {
// pw::log_tokenized::Metadata metadata(payload);
//
// if (metadata.level() >= kLogLevel && ModuleEnabled(metadata.module())) {
diff --git a/pw_log_tokenized/test.cc b/pw_log_tokenized/test.cc
index 7355839..6b5f976 100644
--- a/pw_log_tokenized/test.cc
+++ b/pw_log_tokenized/test.cc
@@ -26,14 +26,12 @@
Metadata metadata(0);
size_t encoded_data_size = 0;
-extern "C" void pw_TokenizerHandleEncodedMessageWithPayload(
- pw_TokenizerPayload payload, const uint8_t[], size_t size) {
+extern "C" void pw_tokenizer_HandleEncodedMessageWithPayload(
+ pw_tokenizer_Payload payload, const uint8_t[], size_t size) {
metadata = payload;
encoded_data_size = size;
}
-extern "C" void pw_TokenizerHandleEncodedMessage(const uint8_t[], size_t) {}
-
constexpr uintptr_t kModuleToken =
PW_TOKENIZER_STRING_TOKEN(PW_LOG_MODULE_NAME) &
((1u << _PW_LOG_TOKENIZED_MODULE_BITS) - 1);
diff --git a/pw_metric/public/pw_metric/metric.h b/pw_metric/public/pw_metric/metric.h
index 9c45ea0..89873e9 100644
--- a/pw_metric/public/pw_metric/metric.h
+++ b/pw_metric/public/pw_metric/metric.h
@@ -26,7 +26,7 @@
// Currently, this is for tokens, but later may be a char* when non-tokenized
// metric names are supported.
-typedef uint32_t Token;
+using tokenizer::Token;
// An individual metric. There are only two supported types: uint32_t and
// float. More complicated compound metrics can be built on these primitives.
diff --git a/pw_tokenizer/argument_types_test_c.c b/pw_tokenizer/argument_types_test_c.c
index f594200..2030587 100644
--- a/pw_tokenizer/argument_types_test_c.c
+++ b/pw_tokenizer/argument_types_test_c.c
@@ -72,10 +72,10 @@
static char char_array[16];
// Define the test functions that are called by the C++ unit test.
-#define DEFINE_TEST_FUNCTION(name, ...) \
- pw_TokenizerArgTypes pw_TestTokenizer##name(void) { \
- (void)char_array; \
- return PW_TOKENIZER_ARG_TYPES(__VA_ARGS__); \
+#define DEFINE_TEST_FUNCTION(name, ...) \
+ _pw_tokenizer_ArgTypes pw_TestTokenizer##name(void) { \
+ (void)char_array; \
+ return PW_TOKENIZER_ARG_TYPES(__VA_ARGS__); \
}
DEFINE_TEST_FUNCTION(NoArgs);
diff --git a/pw_tokenizer/base64.cc b/pw_tokenizer/base64.cc
index 6ce4a4c..3404916 100644
--- a/pw_tokenizer/base64.cc
+++ b/pw_tokenizer/base64.cc
@@ -20,7 +20,7 @@
namespace pw::tokenizer {
-extern "C" size_t pw_TokenizerPrefixedBase64Encode(
+extern "C" size_t pw_tokenizer_PrefixedBase64Encode(
const void* binary_message,
size_t binary_size_bytes,
void* output_buffer,
@@ -41,10 +41,10 @@
return encoded_size;
}
-extern "C" size_t pw_TokenizerPrefixedBase64Decode(const void* base64_message,
- size_t base64_size_bytes,
- void* output_buffer,
- size_t output_buffer_size) {
+extern "C" size_t pw_tokenizer_PrefixedBase64Decode(const void* base64_message,
+ size_t base64_size_bytes,
+ void* output_buffer,
+ size_t output_buffer_size) {
const char* base64 = static_cast<const char*>(base64_message);
if (base64_size_bytes == 0 || base64[0] != kBase64Prefix) {
diff --git a/pw_tokenizer/docs.rst b/pw_tokenizer/docs.rst
index f9f8536..2d6ffe4 100644
--- a/pw_tokenizer/docs.rst
+++ b/pw_tokenizer/docs.rst
@@ -165,15 +165,15 @@
``PW_TOKENIZER_CFG_ENCODING_BUFFER_SIZE_BYTES``.
This macro is provided by the ``pw_tokenizer:global_handler`` facade. The
-backend for this facade must define the ``pw_TokenizerHandleEncodedMessage``
+backend for this facade must define the ``pw_tokenizer_HandleEncodedMessage``
C-linkage function.
.. code-block:: cpp
PW_TOKENIZE_TO_GLOBAL_HANDLER(format_string_literal, arguments...);
- void pw_TokenizerHandleEncodedMessage(const uint8_t encoded_message[],
- size_t size_bytes);
+ void pw_tokenizer_HandleEncodedMessage(const uint8_t encoded_message[],
+ size_t size_bytes);
``PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD`` is similar, but passes a
``uintptr_t`` argument to the global handler function. Values like a log level
@@ -181,7 +181,7 @@
This macro is provided by the ``pw_tokenizer:global_handler_with_payload``
facade. The backend for this facade must define the
-``pw_TokenizerHandleEncodedMessageWithPayload`` C-linkage function.
+``pw_tokenizer_HandleEncodedMessageWithPayload`` C-linkage function.
.. code-block:: cpp
@@ -189,9 +189,8 @@
format_string_literal,
arguments...);
- void pw_TokenizerHandleEncodedMessageWithPayload(uintptr_t payload,
- const uint8_t encoded_message[],
- size_t size_bytes);
+ void pw_tokenizer_HandleEncodedMessageWithPayload(
+ uintptr_t payload, const uint8_t encoded_message[], size_t size_bytes);
.. admonition:: When to use these macros
@@ -268,19 +267,19 @@
It is trivial to convert this to a binary log using the tokenizer. The
``RecordLog`` call is replaced with a
``PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD`` invocation. The
-``pw_TokenizerHandleEncodedMessageWithPayload`` implementation collects the
+``pw_tokenizer_HandleEncodedMessageWithPayload`` implementation collects the
timestamp and transmits the message with ``TransmitLog``.
.. code-block:: cpp
#define LOG_INFO(format, ...) \
PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD( \
- (uintptr_t)LogLevel_INFO, \
+ (pw_tokenizer_Payload)LogLevel_INFO, \
__FILE_NAME__ ":%d " format, \
__LINE__, \
__VA_ARGS__); \
- extern "C" void pw_TokenizerHandleEncodedMessageWithPayload(
+ extern "C" void pw_tokenizer_HandleEncodedMessageWithPayload(
uintptr_t level, const uint8_t encoded_message[], size_t size_bytes) {
if (static_cast<LogLevel>(level) >= current_log_level) {
TransmitLog(TimeSinceBootMillis(), encoded_message, size_bytes);
@@ -673,12 +672,12 @@
Encoding
--------
To encode with the Base64 format, add a call to
-``pw::tokenizer::PrefixedBase64Encode`` or ``pw_TokenizerPrefixedBase64Encode``
+``pw::tokenizer::PrefixedBase64Encode`` or ``pw_tokenizer_PrefixedBase64Encode``
in the tokenizer handler function. For example,
.. code-block:: cpp
- void pw_TokenizerHandleEncodedMessage(const uint8_t encoded_message[],
+ void pw_tokenizer_HandleEncodedMessage(const uint8_t encoded_message[],
size_t size_bytes) {
char base64_buffer[64];
size_t base64_size = pw::tokenizer::PrefixedBase64Encode(
@@ -707,12 +706,12 @@
"$pEVTYQkkUmhZam1RPT0=" → "Nested message: $RhYjmQ==" → "Nested message: Wow!"
Base64 decoding is supported in C++ or C with the
-``pw::tokenizer::PrefixedBase64Decode`` or ``pw_TokenizerPrefixedBase64Decode``
+``pw::tokenizer::PrefixedBase64Decode`` or ``pw_tokenizer_PrefixedBase64Decode``
functions.
.. code-block:: cpp
- void pw_TokenizerHandleEncodedMessage(const uint8_t encoded_message[],
+ void pw_tokenizer_HandleEncodedMessage(const uint8_t encoded_message[],
size_t size_bytes) {
char base64_buffer[64];
size_t base64_size = pw::tokenizer::PrefixedBase64Encode(
@@ -780,7 +779,7 @@
* The log level was passed as the payload argument to facilitate runtime log
level control.
* For this project, it was necessary to encode the log messages as text. In
- ``pw_TokenizerHandleEncodedMessageWithPayload``, the log messages were
+ ``pw_tokenizer_HandleEncodedMessageWithPayload``, the log messages were
encoded in the $-prefixed `Base64 format`_, then dispatched as normal log
messages.
* Asserts were tokenized using ``PW_TOKENIZE_TO_CALLBACK``.
diff --git a/pw_tokenizer/encode_args.cc b/pw_tokenizer/encode_args.cc
index b53cc22..d18e1eb 100644
--- a/pw_tokenizer/encode_args.cc
+++ b/pw_tokenizer/encode_args.cc
@@ -124,7 +124,7 @@
} // namespace
-size_t EncodeArgs(pw_TokenizerArgTypes types,
+size_t EncodeArgs(_pw_tokenizer_ArgTypes types,
va_list args,
std::span<uint8_t> output) {
size_t arg_count = types & PW_TOKENIZER_TYPE_COUNT_MASK;
diff --git a/pw_tokenizer/global_handlers_test.cc b/pw_tokenizer/global_handlers_test.cc
index 89885a8..1728a5f 100644
--- a/pw_tokenizer/global_handlers_test.cc
+++ b/pw_tokenizer/global_handlers_test.cc
@@ -104,7 +104,7 @@
}
TEST_F(TokenizeToGlobalHandler, C_SequentialZigZag) {
- pw_TokenizeToGlobalHandlerTest_SequentialZigZag();
+ pw_tokenizer_ToGlobalHandlerTest_SequentialZigZag();
constexpr std::array<uint8_t, 18> expected =
ExpectedData<0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13>(
@@ -113,15 +113,15 @@
EXPECT_EQ(std::memcmp(expected.data(), message_, expected.size()), 0);
}
-extern "C" void pw_TokenizerHandleEncodedMessage(const uint8_t* encoded_message,
- size_t size_bytes) {
+extern "C" void pw_tokenizer_HandleEncodedMessage(
+ const uint8_t* encoded_message, size_t size_bytes) {
TokenizeToGlobalHandler::SetMessage(encoded_message, size_bytes);
}
class TokenizeToGlobalHandlerWithPayload
: public GlobalMessage<TokenizeToGlobalHandlerWithPayload> {
public:
- static void SetPayload(pw_TokenizerPayload payload) {
+ static void SetPayload(pw_tokenizer_Payload payload) {
payload_ = static_cast<intptr_t>(payload);
}
@@ -140,13 +140,18 @@
ExpectedData<0, 0, 0x00, 0x00, 0x00, 0x80, 0>("%x%lld%1.2f%s");
PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD(
- static_cast<pw_TokenizerPayload>(123), "%x%lld%1.2f%s", 0, 0ll, -0.0, "");
+ static_cast<pw_tokenizer_Payload>(123),
+ "%x%lld%1.2f%s",
+ 0,
+ 0ll,
+ -0.0,
+ "");
ASSERT_EQ(expected.size(), message_size_bytes_);
EXPECT_EQ(std::memcmp(expected.data(), message_, expected.size()), 0);
EXPECT_EQ(payload_, 123);
PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD(
- static_cast<pw_TokenizerPayload>(-543),
+ static_cast<pw_tokenizer_Payload>(-543),
"%x%lld%1.2f%s",
0,
0ll,
@@ -170,7 +175,7 @@
TEST_F(TokenizeToGlobalHandlerWithPayload, Strings_NonZeroPayload) {
PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD(
- static_cast<pw_TokenizerPayload>(5432), "The answer is: %s", "5432!");
+ static_cast<pw_tokenizer_Payload>(5432), "The answer is: %s", "5432!");
ASSERT_EQ(kExpected.size(), message_size_bytes_);
EXPECT_EQ(std::memcmp(kExpected.data(), message_, kExpected.size()), 0);
@@ -180,7 +185,7 @@
TEST_F(TokenizeToGlobalHandlerWithPayload, Domain_Strings) {
PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD_DOMAIN(
"TEST_DOMAIN",
- static_cast<pw_TokenizerPayload>(5432),
+ static_cast<pw_tokenizer_Payload>(5432),
"The answer is: %s",
"5432!");
ASSERT_EQ(kExpected.size(), message_size_bytes_);
@@ -197,7 +202,7 @@
Foo foo{254u, true};
PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD(
- reinterpret_cast<pw_TokenizerPayload>(&foo), "Boring!");
+ reinterpret_cast<pw_tokenizer_Payload>(&foo), "Boring!");
constexpr auto expected = ExpectedData("Boring!");
static_assert(expected.size() == 4);
@@ -211,7 +216,7 @@
}
TEST_F(TokenizeToGlobalHandlerWithPayload, C_SequentialZigZag) {
- pw_TokenizeToGlobalHandlerWithPayloadTest_SequentialZigZag();
+ pw_tokenizer_ToGlobalHandlerWithPayloadTest_SequentialZigZag();
constexpr std::array<uint8_t, 18> expected =
ExpectedData<0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13>(
@@ -221,8 +226,8 @@
EXPECT_EQ(payload_, 600613);
}
-extern "C" void pw_TokenizerHandleEncodedMessageWithPayload(
- pw_TokenizerPayload payload,
+extern "C" void pw_tokenizer_HandleEncodedMessageWithPayload(
+ pw_tokenizer_Payload payload,
const uint8_t* encoded_message,
size_t size_bytes) {
TokenizeToGlobalHandlerWithPayload::SetMessage(encoded_message, size_bytes);
@@ -261,7 +266,7 @@
const char* string_literal = nullptr;
PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD(
- static_cast<pw_TokenizerPayload>(123), "Wow%s", "???");
+ static_cast<pw_tokenizer_Payload>(123), "Wow%s", "???");
EXPECT_STREQ(tokenizer_domain, PW_TOKENIZER_DEFAULT_DOMAIN);
EXPECT_STREQ(string_literal, "Wow%s");
@@ -272,7 +277,7 @@
const char* string_literal = nullptr;
PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD_DOMAIN(
- "THEDOMAIN", static_cast<pw_TokenizerPayload>(123), "1234567890");
+ "THEDOMAIN", static_cast<pw_tokenizer_Payload>(123), "1234567890");
EXPECT_STREQ(tokenizer_domain, "THEDOMAIN");
EXPECT_STREQ(string_literal, "1234567890");
diff --git a/pw_tokenizer/global_handlers_test_c.c b/pw_tokenizer/global_handlers_test_c.c
index e0619e5..04d5551 100644
--- a/pw_tokenizer/global_handlers_test_c.c
+++ b/pw_tokenizer/global_handlers_test_c.c
@@ -26,7 +26,7 @@
// This test invokes the tokenization API with a variety of types. To simplify
// validating the encoded data, numbers that are sequential when zig-zag encoded
// are used as arguments.
-void pw_TokenizeToGlobalHandlerTest_SequentialZigZag(void) {
+void pw_tokenizer_ToGlobalHandlerTest_SequentialZigZag(void) {
PW_TOKENIZE_TO_GLOBAL_HANDLER(TEST_FORMAT_SEQUENTIAL_ZIG_ZAG,
0u,
-1,
@@ -44,8 +44,8 @@
(signed char)-7);
}
-void pw_TokenizeToGlobalHandlerWithPayloadTest_SequentialZigZag(void) {
- PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD((pw_TokenizerPayload)600613,
+void pw_tokenizer_ToGlobalHandlerWithPayloadTest_SequentialZigZag(void) {
+ PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD((pw_tokenizer_Payload)600613,
TEST_FORMAT_SEQUENTIAL_ZIG_ZAG,
0u,
-1,
diff --git a/pw_tokenizer/public/pw_tokenizer/base64.h b/pw_tokenizer/public/pw_tokenizer/base64.h
index bf84e29..e91e66d 100644
--- a/pw_tokenizer/public/pw_tokenizer/base64.h
+++ b/pw_tokenizer/public/pw_tokenizer/base64.h
@@ -43,20 +43,20 @@
// too small.
//
// Equivalent to pw::tokenizer::PrefixedBase64Encode.
-size_t pw_TokenizerPrefixedBase64Encode(const void* binary_message,
- size_t binary_size_bytes,
- void* output_buffer,
- size_t output_buffer_size_bytes);
+size_t pw_tokenizer_PrefixedBase64Encode(const void* binary_message,
+ size_t binary_size_bytes,
+ void* output_buffer,
+ size_t output_buffer_size_bytes);
// Decodes a prefixed Base64 tokenized message to binary. Returns the size of
// the decoded binary data. The resulting data is ready to be passed to
// pw::tokenizer::Detokenizer::Detokenize. Returns 0 if the buffer is too small,
// the expected prefix character is missing, or the Base64 data is corrupt.
//
// Equivalent to pw::tokenizer::PrefixedBase64Encode.
-size_t pw_TokenizerPrefixedBase64Decode(const void* base64_message,
- size_t base64_size_bytes,
- void* output_buffer,
- size_t output_buffer_size);
+size_t pw_tokenizer_PrefixedBase64Decode(const void* base64_message,
+ size_t base64_size_bytes,
+ void* output_buffer,
+ size_t output_buffer_size);
PW_EXTERN_C_END
@@ -74,10 +74,10 @@
// too small or does not start with kBase64Prefix.
inline size_t PrefixedBase64Encode(std::span<const std::byte> binary_message,
std::span<char> output_buffer) {
- return pw_TokenizerPrefixedBase64Encode(binary_message.data(),
- binary_message.size(),
- output_buffer.data(),
- output_buffer.size());
+ return pw_tokenizer_PrefixedBase64Encode(binary_message.data(),
+ binary_message.size(),
+ output_buffer.data(),
+ output_buffer.size());
}
// Also accept a std::span<const uint8_t> for the binary message.
@@ -91,16 +91,16 @@
// pw::tokenizer::Detokenizer::Detokenize.
inline size_t PrefixedBase64Decode(std::string_view base64_message,
std::span<std::byte> output_buffer) {
- return pw_TokenizerPrefixedBase64Decode(base64_message.data(),
- base64_message.size(),
- output_buffer.data(),
- output_buffer.size());
+ return pw_tokenizer_PrefixedBase64Decode(base64_message.data(),
+ base64_message.size(),
+ output_buffer.data(),
+ output_buffer.size());
}
// Decodes a prefixed Base64 tokenized message to binary in place. Returns the
// size of the decoded binary data.
inline size_t PrefixedBase64DecodeInPlace(std::span<std::byte> buffer) {
- return pw_TokenizerPrefixedBase64Decode(
+ return pw_tokenizer_PrefixedBase64Decode(
buffer.data(), buffer.size(), buffer.data(), buffer.size());
}
diff --git a/pw_tokenizer/public/pw_tokenizer/internal/argument_types.h b/pw_tokenizer/public/pw_tokenizer/internal/argument_types.h
index ecff29d..8611577 100644
--- a/pw_tokenizer/public/pw_tokenizer/internal/argument_types.h
+++ b/pw_tokenizer/public/pw_tokenizer/internal/argument_types.h
@@ -31,7 +31,7 @@
#define PW_TOKENIZER_TYPE_COUNT_SIZE_BITS 4u
#define PW_TOKENIZER_TYPE_COUNT_MASK 0x0Fu
-typedef uint32_t pw_TokenizerArgTypes;
+typedef uint32_t _pw_tokenizer_ArgTypes;
#elif PW_TOKENIZER_CFG_ARG_TYPES_SIZE_BYTES == 8
@@ -42,7 +42,7 @@
#define PW_TOKENIZER_TYPE_COUNT_SIZE_BITS 6u
#define PW_TOKENIZER_TYPE_COUNT_MASK 0x1Fu // only 5 bits will be needed
-typedef uint64_t pw_TokenizerArgTypes;
+typedef uint64_t _pw_tokenizer_ArgTypes;
#else
@@ -52,7 +52,7 @@
// The tokenized string encoding function is a variadic function that works
// similarly to printf. Instead of a format string, however, the argument types
-// are packed into a pw_TokenizerArgTypes.
+// are packed into a _pw_tokenizer_ArgTypes.
//
// The four supported argument types are represented by two-bit argument codes.
// Just four types are required because only printf-compatible arguments are
@@ -62,10 +62,10 @@
// char* values cannot be printed as pointers with %p. These arguments are
// always encoded as strings. To format a char* as an address, cast it to void*
// or an integer.
-#define PW_TOKENIZER_ARG_TYPE_INT ((pw_TokenizerArgTypes)0)
-#define PW_TOKENIZER_ARG_TYPE_INT64 ((pw_TokenizerArgTypes)1)
-#define PW_TOKENIZER_ARG_TYPE_DOUBLE ((pw_TokenizerArgTypes)2)
-#define PW_TOKENIZER_ARG_TYPE_STRING ((pw_TokenizerArgTypes)3)
+#define PW_TOKENIZER_ARG_TYPE_INT ((_pw_tokenizer_ArgTypes)0)
+#define PW_TOKENIZER_ARG_TYPE_INT64 ((_pw_tokenizer_ArgTypes)1)
+#define PW_TOKENIZER_ARG_TYPE_DOUBLE ((_pw_tokenizer_ArgTypes)2)
+#define PW_TOKENIZER_ARG_TYPE_STRING ((_pw_tokenizer_ArgTypes)3)
// Select the int argument type based on the size of the type. Values smaller
// than int are promoted to int.
@@ -89,7 +89,7 @@
// This function selects the matching type enum for supported argument types.
template <typename T>
-constexpr pw_TokenizerArgTypes VarargsType() {
+constexpr _pw_tokenizer_ArgTypes VarargsType() {
using ArgType = std::decay_t<T>;
if constexpr (std::is_floating_point<ArgType>()) {
@@ -116,26 +116,26 @@
template <typename T, bool kDontCare1, bool kDontCare2>
struct SelectVarargsType<T, true, kDontCare1, kDontCare2> {
- static constexpr pw_TokenizerArgTypes kValue = PW_TOKENIZER_ARG_TYPE_DOUBLE;
+ static constexpr _pw_tokenizer_ArgTypes kValue = PW_TOKENIZER_ARG_TYPE_DOUBLE;
};
template <typename T, bool kDontCare>
struct SelectVarargsType<T, false, true, kDontCare> {
- static constexpr pw_TokenizerArgTypes kValue = PW_TOKENIZER_ARG_TYPE_STRING;
+ static constexpr _pw_tokenizer_ArgTypes kValue = PW_TOKENIZER_ARG_TYPE_STRING;
};
template <typename T>
struct SelectVarargsType<T, false, false, true> {
- static constexpr pw_TokenizerArgTypes kValue = PW_TOKENIZER_ARG_TYPE_INT64;
+ static constexpr _pw_tokenizer_ArgTypes kValue = PW_TOKENIZER_ARG_TYPE_INT64;
};
template <typename T>
struct SelectVarargsType<T, false, false, false> {
- static constexpr pw_TokenizerArgTypes kValue = PW_TOKENIZER_ARG_TYPE_INT;
+ static constexpr _pw_tokenizer_ArgTypes kValue = PW_TOKENIZER_ARG_TYPE_INT;
};
template <typename T>
-constexpr pw_TokenizerArgTypes VarargsType() {
+constexpr _pw_tokenizer_ArgTypes VarargsType() {
return SelectVarargsType<typename std::decay<T>::type>::kValue;
}
@@ -174,14 +174,14 @@
#endif // __cplusplus
-// Encodes the types of the provided arguments as a pw_TokenizerArgTypes value.
-// Depending on the size of pw_TokenizerArgTypes, the bottom 4 or 6 bits store
-// the number of arguments and the remaining bits store the types, two bits per
-// type.
+// Encodes the types of the provided arguments as a _pw_tokenizer_ArgTypes
+// value. Depending on the size of _pw_tokenizer_ArgTypes, the bottom 4 or 6
+// bits store the number of arguments and the remaining bits store the types,
+// two bits per type.
//
// The arguments are not evaluated; only their types are used to
// select the set their corresponding PW_TOKENIZER_ARG_TYPEs.
#define PW_TOKENIZER_ARG_TYPES(...) \
PW_DELEGATE_BY_ARG_COUNT(_PW_TOKENIZER_TYPES_, __VA_ARGS__)
-#define _PW_TOKENIZER_TYPES_0() ((pw_TokenizerArgTypes)0)
+#define _PW_TOKENIZER_TYPES_0() ((_pw_tokenizer_ArgTypes)0)
diff --git a/pw_tokenizer/public/pw_tokenizer/internal/tokenize_string.h b/pw_tokenizer/public/pw_tokenizer/internal/tokenize_string.h
index 27434bd..73d2739 100644
--- a/pw_tokenizer/public/pw_tokenizer/internal/tokenize_string.h
+++ b/pw_tokenizer/public/pw_tokenizer/internal/tokenize_string.h
@@ -64,6 +64,3 @@
#endif // PW_TOKENIZER_CFG_HASH_LENGTH
#endif // __cpp_constexpr >= 201304L && defined(__cpp_inline_variables)
-
-// The type of the token used in place of a format string.
-typedef uint32_t pw_TokenizerStringToken;
diff --git a/pw_tokenizer/public/pw_tokenizer/tokenize.h b/pw_tokenizer/public/pw_tokenizer/tokenize.h
index c988fe5..94a076e 100644
--- a/pw_tokenizer/public/pw_tokenizer/tokenize.h
+++ b/pw_tokenizer/public/pw_tokenizer/tokenize.h
@@ -34,6 +34,10 @@
#include "pw_tokenizer/internal/argument_types.h"
#include "pw_tokenizer/internal/tokenize_string.h"
+// The type of the token used in place of a format string. Also available as
+// pw::tokenizer::Token.
+typedef uint32_t pw_tokenizer_Token;
+
// Strings may optionally be tokenized to a domain. Strings in different domains
// can be processed separately by the token database tools. Each domain in use
// must have a corresponding section declared in the linker script. See
@@ -42,7 +46,7 @@
// If no domain is specified, this default is used.
#define PW_TOKENIZER_DEFAULT_DOMAIN "default"
-// Tokenizes a string and converts it to a pw_TokenizerStringToken. In C++, the
+// Tokenizes a string and converts it to a pw_tokenizer_Token. In C++, the
// string may be a literal or a constexpr char array. In C, the argument must be
// a string literal.
//
@@ -94,15 +98,15 @@
__VA_ARGS__)
// Same as PW_TOKENIZE_TO_BUFFER, but tokenizes to the specified domain.
-#define PW_TOKENIZE_TO_BUFFER_DOMAIN( \
- domain, buffer, buffer_size_pointer, format, ...) \
- do { \
- _PW_TOKENIZE_FORMAT_STRING(domain, format, __VA_ARGS__); \
- _pw_TokenizeToBuffer(buffer, \
- buffer_size_pointer, \
- _pw_tokenizer_token, \
- PW_TOKENIZER_ARG_TYPES(__VA_ARGS__) \
- PW_COMMA_ARGS(__VA_ARGS__)); \
+#define PW_TOKENIZE_TO_BUFFER_DOMAIN( \
+ domain, buffer, buffer_size_pointer, format, ...) \
+ do { \
+ _PW_TOKENIZE_FORMAT_STRING(domain, format, __VA_ARGS__); \
+ _pw_tokenizer_ToBuffer(buffer, \
+ buffer_size_pointer, \
+ _pw_tokenizer_token, \
+ PW_TOKENIZER_ARG_TYPES(__VA_ARGS__) \
+ PW_COMMA_ARGS(__VA_ARGS__)); \
} while (0)
// Encodes a tokenized string and arguments to a buffer on the stack. The
@@ -140,33 +144,33 @@
#define PW_TOKENIZE_TO_CALLBACK_DOMAIN(domain, callback, format, ...) \
do { \
_PW_TOKENIZE_FORMAT_STRING(domain, format, __VA_ARGS__); \
- _pw_TokenizeToCallback(callback, \
- _pw_tokenizer_token, \
- PW_TOKENIZER_ARG_TYPES(__VA_ARGS__) \
- PW_COMMA_ARGS(__VA_ARGS__)); \
+ _pw_tokenizer_ToCallback(callback, \
+ _pw_tokenizer_token, \
+ PW_TOKENIZER_ARG_TYPES(__VA_ARGS__) \
+ PW_COMMA_ARGS(__VA_ARGS__)); \
} while (0)
PW_EXTERN_C_START
// These functions encode the tokenized strings. These should not be called
// directly. Instead, use the corresponding PW_TOKENIZE_TO_* macros above.
-void _pw_TokenizeToBuffer(void* buffer,
- size_t* buffer_size_bytes, // input and output arg
- pw_TokenizerStringToken token,
- pw_TokenizerArgTypes types,
- ...);
-
-void _pw_TokenizeToCallback(void (*callback)(const uint8_t* encoded_message,
- size_t size_bytes),
- pw_TokenizerStringToken token,
- pw_TokenizerArgTypes types,
+void _pw_tokenizer_ToBuffer(void* buffer,
+ size_t* buffer_size_bytes, // input and output arg
+ pw_tokenizer_Token token,
+ _pw_tokenizer_ArgTypes types,
...);
+void _pw_tokenizer_ToCallback(void (*callback)(const uint8_t* encoded_message,
+ size_t size_bytes),
+ pw_tokenizer_Token token,
+ _pw_tokenizer_ArgTypes types,
+ ...);
+
// This empty function allows the compiler to check the format string.
-inline void pw_TokenizerCheckFormatString(const char* format, ...)
+static inline void pw_tokenizer_CheckFormatString(const char* format, ...)
PW_PRINTF_FORMAT(1, 2);
-inline void pw_TokenizerCheckFormatString(const char* format, ...) {
+static inline void pw_tokenizer_CheckFormatString(const char* format, ...) {
PW_UNUSED(format);
}
@@ -179,9 +183,9 @@
// checks that the arguments are correct, stores the format string in a special
// section, and calculates the string's token at compile time.
// clang-format off
-#define _PW_TOKENIZE_FORMAT_STRING(domain, format, ...) \
+#define _PW_TOKENIZE_FORMAT_STRING(domain, format, ...) \
if (0) { /* Do not execute to prevent double evaluation of the arguments. */ \
- pw_TokenizerCheckFormatString(format PW_COMMA_ARGS(__VA_ARGS__)); \
+ pw_tokenizer_CheckFormatString(format PW_COMMA_ARGS(__VA_ARGS__)); \
} \
\
/* Check that the macro is invoked with a supported number of arguments. */ \
@@ -192,16 +196,28 @@
PW_STRINGIFY(PW_ARG_COUNT(__VA_ARGS__)) " arguments were used for " \
#format " (" #__VA_ARGS__ ")"); \
\
- /* Tokenize the string to a pw_TokenizerStringToken at compile time. */ \
- _PW_TOKENIZER_CONST pw_TokenizerStringToken _pw_tokenizer_token = \
+ /* Tokenize the string to a pw_tokenizer_Token at compile time. */ \
+ _PW_TOKENIZER_CONST pw_tokenizer_Token _pw_tokenizer_token = \
PW_TOKENIZE_STRING_DOMAIN(domain, format)
// clang-format on
#ifdef __cplusplus // use constexpr for C++
+
#define _PW_TOKENIZER_CONST constexpr
+
+namespace pw {
+namespace tokenizer {
+
+using Token = ::pw_tokenizer_Token;
+
+} // namespace tokenizer
+} // namespace pw
+
#else // use const for C
+
#define _PW_TOKENIZER_CONST const
+
#endif // __cplusplus
// _PW_TOKENIZER_SECTION places the format string in a special .pw_tokenized
diff --git a/pw_tokenizer/public/pw_tokenizer/tokenize_to_global_handler.h b/pw_tokenizer/public/pw_tokenizer/tokenize_to_global_handler.h
index ce41741..e973326 100644
--- a/pw_tokenizer/public/pw_tokenizer/tokenize_to_global_handler.h
+++ b/pw_tokenizer/public/pw_tokenizer/tokenize_to_global_handler.h
@@ -20,7 +20,7 @@
#include "pw_tokenizer/tokenize.h"
// Encodes a tokenized string and arguments to a buffer on the stack. The buffer
-// is passed to the user-defined pw_TokenizerHandleEncodedMessage function. The
+// is passed to the user-defined pw_tokenizer_HandleEncodedMessage function. The
// size of the stack-allocated argument encoding buffer is set with the
// PW_TOKENIZER_CFG_ENCODING_BUFFER_SIZE_BYTES option.
//
@@ -31,13 +31,13 @@
//
// For example, the following encodes a tokenized string with a value returned
// from a function call. The encoded message is passed to the caller-defined
-// pw_TokenizerHandleEncodedMessage function.
+// pw_tokenizer_HandleEncodedMessage function.
//
// void OutputLastReadSize() {
// PW_TOKENIZE_TO_GLOBAL_HANDLER("Read %u bytes", ReadSizeBytes());
// }
//
-// void pw_TokenizerHandleEncodedMessage(const uint8_t encoded_message[],
+// void pw_tokenizer_HandleEncodedMessage(const uint8_t encoded_message[],
// size_t size_bytes) {
// MyProject_EnqueueMessageForUart(buffer, size_bytes);
// }
@@ -47,26 +47,30 @@
PW_TOKENIZER_DEFAULT_DOMAIN, format, __VA_ARGS__)
// Same as PW_TOKENIZE_TO_GLOBAL_HANDLER, but tokenizes to the specified domain.
-#define PW_TOKENIZE_TO_GLOBAL_HANDLER_DOMAIN(domain, format, ...) \
- do { \
- _PW_TOKENIZE_FORMAT_STRING(domain, format, __VA_ARGS__); \
- _pw_TokenizeToGlobalHandler(_pw_tokenizer_token, \
- PW_TOKENIZER_ARG_TYPES(__VA_ARGS__) \
- PW_COMMA_ARGS(__VA_ARGS__)); \
+#define PW_TOKENIZE_TO_GLOBAL_HANDLER_DOMAIN(domain, format, ...) \
+ do { \
+ _PW_TOKENIZE_FORMAT_STRING(domain, format, __VA_ARGS__); \
+ _pw_tokenizer_ToGlobalHandler(_pw_tokenizer_token, \
+ PW_TOKENIZER_ARG_TYPES(__VA_ARGS__) \
+ PW_COMMA_ARGS(__VA_ARGS__)); \
} while (0)
PW_EXTERN_C_START
// This function must be defined by the pw_tokenizer:global_handler backend.
// This function is called with the encoded message by
-// pw_TokenizeToGlobalHandler.
-void pw_TokenizerHandleEncodedMessage(const uint8_t encoded_message[],
- size_t size_bytes);
+// _pw_tokenizer_ToGlobalHandler.
+void pw_tokenizer_HandleEncodedMessage(const uint8_t encoded_message[],
+ size_t size_bytes);
+
+// TODO(hepler): Remove this alias when all projects have migrated to the new
+// function name.
+#define pw_TokenizerHandleEncodedMessage pw_tokenizer_HandleEncodedMessage
// This function encodes the tokenized strings. Do not call it directly;
// instead, use the PW_TOKENIZE_TO_GLOBAL_HANDLER macro.
-void _pw_TokenizeToGlobalHandler(pw_TokenizerStringToken token,
- pw_TokenizerArgTypes types,
- ...);
+void _pw_tokenizer_ToGlobalHandler(pw_tokenizer_Token token,
+ _pw_tokenizer_ArgTypes types,
+ ...);
PW_EXTERN_C_END
diff --git a/pw_tokenizer/public/pw_tokenizer/tokenize_to_global_handler_with_payload.h b/pw_tokenizer/public/pw_tokenizer/tokenize_to_global_handler_with_payload.h
index df1800f..0b1fe51 100644
--- a/pw_tokenizer/public/pw_tokenizer/tokenize_to_global_handler_with_payload.h
+++ b/pw_tokenizer/public/pw_tokenizer/tokenize_to_global_handler_with_payload.h
@@ -22,7 +22,7 @@
// Like PW_TOKENIZE_TO_GLOBAL_HANDLER, encodes a tokenized string and arguments
// to a buffer on the stack. The macro adds a payload argument, which is passed
// through to the global handler function
-// pw_TokenizerHandleEncodedMessageWithPayload, which must be defined by the
+// pw_tokenizer_HandleEncodedMessageWithPayload, which must be defined by the
// user of pw_tokenizer. The payload is a uintptr_t.
//
// For example, the following tokenizes a log string and passes the log level as
@@ -31,8 +31,8 @@
#define LOG_ERROR(...) \
PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD(kLogLevelError, __VA_ARGS__)
- void pw_TokenizerHandleEncodedMessageWithPayload(
- pw_TokenizerPayload log_level,
+ void pw_tokenizer_HandleEncodedMessageWithPayload(
+ pw_tokenizer_Payload log_level,
const uint8_t encoded_message[],
size_t size_bytes) {
if (log_level >= kLogLevelWarning) {
@@ -46,33 +46,42 @@
// Same as PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD, but tokenizes to the
// specified domain.
-#define PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD_DOMAIN( \
- domain, payload, format, ...) \
- do { \
- _PW_TOKENIZE_FORMAT_STRING(domain, format, __VA_ARGS__); \
- _pw_TokenizeToGlobalHandlerWithPayload(payload, \
- _pw_tokenizer_token, \
- PW_TOKENIZER_ARG_TYPES(__VA_ARGS__) \
- PW_COMMA_ARGS(__VA_ARGS__)); \
+#define PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD_DOMAIN( \
+ domain, payload, format, ...) \
+ do { \
+ _PW_TOKENIZE_FORMAT_STRING(domain, format, __VA_ARGS__); \
+ _pw_tokenizer_ToGlobalHandlerWithPayload( \
+ payload, \
+ _pw_tokenizer_token, \
+ PW_TOKENIZER_ARG_TYPES(__VA_ARGS__) PW_COMMA_ARGS(__VA_ARGS__)); \
} while (0)
PW_EXTERN_C_START
-typedef uintptr_t pw_TokenizerPayload;
+typedef uintptr_t pw_tokenizer_Payload;
+
+// TODO(hepler): Remove this alias when all projects have migrated to the new
+// typedef name.
+typedef pw_tokenizer_Payload pw_TokenizerPayload;
// This function must be defined pw_tokenizer:global_handler_with_payload
// backend. This function is called with the encoded message by
-// pw_TokenizeToGlobalHandler and a caller-provided payload argument.
-void pw_TokenizerHandleEncodedMessageWithPayload(
- pw_TokenizerPayload payload,
+// pw_tokenizer_ToGlobalHandler and a caller-provided payload argument.
+void pw_tokenizer_HandleEncodedMessageWithPayload(
+ pw_tokenizer_Payload payload,
const uint8_t encoded_message[],
size_t size_bytes);
+// TODO(hepler): Remove this alias when all projects have migrated to the new
+// function name.
+#define pw_TokenizerHandleEncodedMessageWithPayload \
+ pw_tokenizer_HandleEncodedMessageWithPayload
+
// This function encodes the tokenized strings. Do not call it directly;
// instead, use the PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD macro.
-void _pw_TokenizeToGlobalHandlerWithPayload(pw_TokenizerPayload payload,
- pw_TokenizerStringToken token,
- pw_TokenizerArgTypes types,
- ...);
+void _pw_tokenizer_ToGlobalHandlerWithPayload(pw_tokenizer_Payload payload,
+ pw_tokenizer_Token token,
+ _pw_tokenizer_ArgTypes types,
+ ...);
PW_EXTERN_C_END
diff --git a/pw_tokenizer/pw_tokenizer_private/argument_types_test.h b/pw_tokenizer/pw_tokenizer_private/argument_types_test.h
index 52ac6c9..b616392 100644
--- a/pw_tokenizer/pw_tokenizer_private/argument_types_test.h
+++ b/pw_tokenizer/pw_tokenizer_private/argument_types_test.h
@@ -20,27 +20,27 @@
PW_EXTERN_C_START
-pw_TokenizerArgTypes pw_TestTokenizerNoArgs(void);
+_pw_tokenizer_ArgTypes pw_TestTokenizerNoArgs(void);
-pw_TokenizerArgTypes pw_TestTokenizerChar(void);
-pw_TokenizerArgTypes pw_TestTokenizerUint8(void);
-pw_TokenizerArgTypes pw_TestTokenizerUint16(void);
-pw_TokenizerArgTypes pw_TestTokenizerInt32(void);
-pw_TokenizerArgTypes pw_TestTokenizerInt64(void);
-pw_TokenizerArgTypes pw_TestTokenizerUint64(void);
-pw_TokenizerArgTypes pw_TestTokenizerFloat(void);
-pw_TokenizerArgTypes pw_TestTokenizerDouble(void);
-pw_TokenizerArgTypes pw_TestTokenizerString(void);
-pw_TokenizerArgTypes pw_TestTokenizerMutableString(void);
+_pw_tokenizer_ArgTypes pw_TestTokenizerChar(void);
+_pw_tokenizer_ArgTypes pw_TestTokenizerUint8(void);
+_pw_tokenizer_ArgTypes pw_TestTokenizerUint16(void);
+_pw_tokenizer_ArgTypes pw_TestTokenizerInt32(void);
+_pw_tokenizer_ArgTypes pw_TestTokenizerInt64(void);
+_pw_tokenizer_ArgTypes pw_TestTokenizerUint64(void);
+_pw_tokenizer_ArgTypes pw_TestTokenizerFloat(void);
+_pw_tokenizer_ArgTypes pw_TestTokenizerDouble(void);
+_pw_tokenizer_ArgTypes pw_TestTokenizerString(void);
+_pw_tokenizer_ArgTypes pw_TestTokenizerMutableString(void);
-pw_TokenizerArgTypes pw_TestTokenizerIntFloat(void);
-pw_TokenizerArgTypes pw_TestTokenizerUint64Char(void);
-pw_TokenizerArgTypes pw_TestTokenizerStringString(void);
-pw_TokenizerArgTypes pw_TestTokenizerUint16Int(void);
-pw_TokenizerArgTypes pw_TestTokenizerFloatString(void);
+_pw_tokenizer_ArgTypes pw_TestTokenizerIntFloat(void);
+_pw_tokenizer_ArgTypes pw_TestTokenizerUint64Char(void);
+_pw_tokenizer_ArgTypes pw_TestTokenizerStringString(void);
+_pw_tokenizer_ArgTypes pw_TestTokenizerUint16Int(void);
+_pw_tokenizer_ArgTypes pw_TestTokenizerFloatString(void);
-pw_TokenizerArgTypes pw_TestTokenizerNull(void);
-pw_TokenizerArgTypes pw_TestTokenizerPointer(void);
-pw_TokenizerArgTypes pw_TestTokenizerPointerPointer(void);
+_pw_tokenizer_ArgTypes pw_TestTokenizerNull(void);
+_pw_tokenizer_ArgTypes pw_TestTokenizerPointer(void);
+_pw_tokenizer_ArgTypes pw_TestTokenizerPointerPointer(void);
PW_EXTERN_C_END
diff --git a/pw_tokenizer/pw_tokenizer_private/encode_args.h b/pw_tokenizer/pw_tokenizer_private/encode_args.h
index 2f39949..7a85963 100644
--- a/pw_tokenizer/pw_tokenizer_private/encode_args.h
+++ b/pw_tokenizer/pw_tokenizer_private/encode_args.h
@@ -20,23 +20,24 @@
#include "pw_tokenizer/config.h"
#include "pw_tokenizer/internal/argument_types.h"
-#include "pw_tokenizer/internal/tokenize_string.h"
+#include "pw_tokenizer/tokenize.h"
namespace pw {
namespace tokenizer {
// Buffer for encoding a tokenized string and arguments.
struct EncodedMessage {
- pw_TokenizerStringToken token;
+ pw_tokenizer_Token token;
std::array<uint8_t, PW_TOKENIZER_CFG_ENCODING_BUFFER_SIZE_BYTES> args;
};
static_assert(offsetof(EncodedMessage, args) == sizeof(EncodedMessage::token),
"EncodedMessage should not have padding bytes between members");
-// Encodes a tokenized string's arguments to a buffer. The pw_TokenizerArgTypes
-// parameter specifies the argument types, in place of a format string.
-size_t EncodeArgs(pw_TokenizerArgTypes types,
+// Encodes a tokenized string's arguments to a buffer. The
+// _pw_tokenizer_ArgTypes parameter specifies the argument types, in place of a
+// format string.
+size_t EncodeArgs(_pw_tokenizer_ArgTypes types,
va_list args,
std::span<uint8_t> output);
diff --git a/pw_tokenizer/pw_tokenizer_private/tokenize_test.h b/pw_tokenizer/pw_tokenizer_private/tokenize_test.h
index d67764a..3c4b463 100644
--- a/pw_tokenizer/pw_tokenizer_private/tokenize_test.h
+++ b/pw_tokenizer/pw_tokenizer_private/tokenize_test.h
@@ -25,23 +25,23 @@
#define TEST_FORMAT_STRING_SHORT_FLOAT "Hello %s! %hd %e"
-void pw_TokenizeToBufferTest_StringShortFloat(void* buffer,
- size_t* buffer_size);
+void pw_tokenizer_ToBufferTest_StringShortFloat(void* buffer,
+ size_t* buffer_size);
#define TEST_FORMAT_SEQUENTIAL_ZIG_ZAG "%u%d%02x%X%hu%hhd%d%ld%lu%lld%llu%c%c%c"
-void pw_TokenizeToBufferTest_SequentialZigZag(void* buffer,
- size_t* buffer_size);
+void pw_tokenizer_ToBufferTest_SequentialZigZag(void* buffer,
+ size_t* buffer_size);
-void pw_TokenizeToCallbackTest_SequentialZigZag(
+void pw_tokenizer_ToCallbackTest_SequentialZigZag(
void (*callback)(const uint8_t* buffer, size_t size));
#define TEST_FORMAT_REQUIRES_8 "Won't fit : %s%d"
-void pw_TokenizeToBufferTest_Requires8(void* buffer, size_t* buffer_size);
+void pw_tokenizer_ToBufferTest_Requires8(void* buffer, size_t* buffer_size);
-void pw_TokenizeToGlobalHandlerTest_SequentialZigZag(void);
+void pw_tokenizer_ToGlobalHandlerTest_SequentialZigZag(void);
-void pw_TokenizeToGlobalHandlerWithPayloadTest_SequentialZigZag(void);
+void pw_tokenizer_ToGlobalHandlerWithPayloadTest_SequentialZigZag(void);
PW_EXTERN_C_END
diff --git a/pw_tokenizer/simple_tokenize_test.cc b/pw_tokenizer/simple_tokenize_test.cc
index ac00c91..ba6a5a8 100644
--- a/pw_tokenizer/simple_tokenize_test.cc
+++ b/pw_tokenizer/simple_tokenize_test.cc
@@ -50,7 +50,7 @@
}
TEST(TokenizeStringLiteral, EmptyString_IsZero) {
- constexpr pw_TokenizerStringToken token = PW_TOKENIZE_STRING("");
+ constexpr pw_tokenizer_Token token = PW_TOKENIZE_STRING("");
EXPECT_TRUE(0u == token);
}
@@ -139,15 +139,15 @@
EXPECT_TRUE(std::memcmp(expected.data(), message_, expected.size()) == 0);
}
-extern "C" void pw_TokenizerHandleEncodedMessage(const uint8_t* encoded_message,
- size_t size_bytes) {
+extern "C" void pw_tokenizer_HandleEncodedMessage(
+ const uint8_t* encoded_message, size_t size_bytes) {
TokenizeToGlobalHandler::SetMessage(encoded_message, size_bytes);
}
class TokenizeToGlobalHandlerWithPayload
: public GlobalMessage<TokenizeToGlobalHandlerWithPayload> {
public:
- static void SetPayload(pw_TokenizerPayload payload) {
+ static void SetPayload(pw_tokenizer_Payload payload) {
payload_ = static_cast<intptr_t>(payload);
}
@@ -166,13 +166,18 @@
ExpectedData<0, 0, 0x00, 0x00, 0x00, 0x80, 0>("%x%lld%1.2f%s");
PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD(
- static_cast<pw_TokenizerPayload>(123), "%x%lld%1.2f%s", 0, 0ll, -0.0, "");
+ static_cast<pw_tokenizer_Payload>(123),
+ "%x%lld%1.2f%s",
+ 0,
+ 0ll,
+ -0.0,
+ "");
ASSERT_TRUE(expected.size() == message_size_bytes_);
EXPECT_TRUE(std::memcmp(expected.data(), message_, expected.size()) == 0);
EXPECT_TRUE(payload_ == 123);
PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD(
- static_cast<pw_TokenizerPayload>(-543),
+ static_cast<pw_tokenizer_Payload>(-543),
"%x%lld%1.2f%s",
0,
0ll,
@@ -183,8 +188,8 @@
EXPECT_TRUE(payload_ == -543);
}
-extern "C" void pw_TokenizerHandleEncodedMessageWithPayload(
- pw_TokenizerPayload payload,
+extern "C" void pw_tokenizer_HandleEncodedMessageWithPayload(
+ pw_tokenizer_Payload payload,
const uint8_t* encoded_message,
size_t size_bytes) {
TokenizeToGlobalHandlerWithPayload::SetMessage(encoded_message, size_bytes);
diff --git a/pw_tokenizer/tokenize.cc b/pw_tokenizer/tokenize.cc
index 8b1a20a..57e1115 100644
--- a/pw_tokenizer/tokenize.cc
+++ b/pw_tokenizer/tokenize.cc
@@ -25,11 +25,11 @@
namespace pw {
namespace tokenizer {
-extern "C" void _pw_TokenizeToBuffer(void* buffer,
- size_t* buffer_size_bytes,
- pw_TokenizerStringToken token,
- pw_TokenizerArgTypes types,
- ...) {
+extern "C" void _pw_tokenizer_ToBuffer(void* buffer,
+ size_t* buffer_size_bytes,
+ Token token,
+ _pw_tokenizer_ArgTypes types,
+ ...) {
if (*buffer_size_bytes < sizeof(token)) {
*buffer_size_bytes = 0;
return;
@@ -49,10 +49,10 @@
*buffer_size_bytes = sizeof(token) + encoded_bytes;
}
-extern "C" void _pw_TokenizeToCallback(
+extern "C" void _pw_tokenizer_ToCallback(
void (*callback)(const uint8_t* encoded_message, size_t size_bytes),
- pw_TokenizerStringToken token,
- pw_TokenizerArgTypes types,
+ Token token,
+ _pw_tokenizer_ArgTypes types,
...) {
EncodedMessage encoded;
encoded.token = token;
diff --git a/pw_tokenizer/tokenize_test.cc b/pw_tokenizer/tokenize_test.cc
index cd96681..6a64c1a 100644
--- a/pw_tokenizer/tokenize_test.cc
+++ b/pw_tokenizer/tokenize_test.cc
@@ -52,7 +52,7 @@
}
TEST(TokenizeString, EmptyString_IsZero) {
- constexpr pw_TokenizerStringToken token = PW_TOKENIZE_STRING("");
+ constexpr pw_tokenizer_Token token = PW_TOKENIZE_STRING("");
EXPECT_EQ(0u, token);
}
@@ -364,7 +364,7 @@
TEST_F(TokenizeToBuffer, C_StringShortFloat) {
size_t size = sizeof(buffer_);
- pw_TokenizeToBufferTest_StringShortFloat(buffer_, &size);
+ pw_tokenizer_ToBufferTest_StringShortFloat(buffer_, &size);
constexpr std::array<uint8_t, 11> expected = // clang-format off
ExpectedData<1, '1', // string '1'
3, // -2 (zig-zag encoded)
@@ -376,7 +376,7 @@
TEST_F(TokenizeToBuffer, C_SequentialZigZag) {
size_t size = sizeof(buffer_);
- pw_TokenizeToBufferTest_SequentialZigZag(buffer_, &size);
+ pw_tokenizer_ToBufferTest_SequentialZigZag(buffer_, &size);
constexpr std::array<uint8_t, 18> expected =
ExpectedData<0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13>(
TEST_FORMAT_SEQUENTIAL_ZIG_ZAG);
@@ -390,7 +390,7 @@
{
size_t size = 7;
- pw_TokenizeToBufferTest_Requires8(buffer_, &size);
+ pw_tokenizer_ToBufferTest_Requires8(buffer_, &size);
constexpr std::array<uint8_t, 7> expected =
ExpectedData<2, 'h', 'i'>(TEST_FORMAT_REQUIRES_8);
ASSERT_EQ(expected.size(), size);
@@ -400,7 +400,7 @@
{
size_t size = 8;
- pw_TokenizeToBufferTest_Requires8(buffer_, &size);
+ pw_tokenizer_ToBufferTest_Requires8(buffer_, &size);
constexpr std::array<uint8_t, 8> expected =
ExpectedData<2, 'h', 'i', 13>(TEST_FORMAT_REQUIRES_8);
ASSERT_EQ(expected.size(), size);
@@ -470,7 +470,7 @@
}
TEST_F(TokenizeToCallback, C_SequentialZigZag) {
- pw_TokenizeToCallbackTest_SequentialZigZag(SetMessage);
+ pw_tokenizer_ToCallbackTest_SequentialZigZag(SetMessage);
constexpr std::array<uint8_t, 18> expected =
ExpectedData<0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13>(
diff --git a/pw_tokenizer/tokenize_test_c.c b/pw_tokenizer/tokenize_test_c.c
index f012cb8..bf8877a 100644
--- a/pw_tokenizer/tokenize_test_c.c
+++ b/pw_tokenizer/tokenize_test_c.c
@@ -22,8 +22,8 @@
#error "This is a test of C code and must be compiled as C, not C++."
#endif // __cplusplus
-void pw_TokenizeToBufferTest_StringShortFloat(void* buffer,
- size_t* buffer_size) {
+void pw_tokenizer_ToBufferTest_StringShortFloat(void* buffer,
+ size_t* buffer_size) {
char str[] = "1";
PW_TOKENIZE_TO_BUFFER(
buffer, buffer_size, TEST_FORMAT_STRING_SHORT_FLOAT, str, (short)-2, 3.0);
@@ -32,8 +32,8 @@
// This test invokes the tokenization API with a variety of types. To simplify
// validating the encoded data, numbers that are sequential when zig-zag encoded
// are used as arguments.
-void pw_TokenizeToBufferTest_SequentialZigZag(void* buffer,
- size_t* buffer_size) {
+void pw_tokenizer_ToBufferTest_SequentialZigZag(void* buffer,
+ size_t* buffer_size) {
PW_TOKENIZE_TO_BUFFER(buffer,
buffer_size,
TEST_FORMAT_SEQUENTIAL_ZIG_ZAG,
@@ -53,7 +53,7 @@
(signed char)-7);
}
-void pw_TokenizeToCallbackTest_SequentialZigZag(
+void pw_tokenizer_ToCallbackTest_SequentialZigZag(
void (*callback)(const uint8_t* buffer, size_t size)) {
PW_TOKENIZE_TO_CALLBACK(callback,
TEST_FORMAT_SEQUENTIAL_ZIG_ZAG,
@@ -73,6 +73,6 @@
(signed char)-7);
}
-void pw_TokenizeToBufferTest_Requires8(void* buffer, size_t* buffer_size) {
+void pw_tokenizer_ToBufferTest_Requires8(void* buffer, size_t* buffer_size) {
PW_TOKENIZE_TO_BUFFER(buffer, buffer_size, TEST_FORMAT_REQUIRES_8, "hi", -7);
}
diff --git a/pw_tokenizer/tokenize_test_fakes.cc b/pw_tokenizer/tokenize_test_fakes.cc
index 4dcc9aa..4b9655a 100644
--- a/pw_tokenizer/tokenize_test_fakes.cc
+++ b/pw_tokenizer/tokenize_test_fakes.cc
@@ -22,14 +22,14 @@
#if PW_TOKENIZER_CFG_ENABLE_TOKENIZE_TO_GLOBAL_HANDLER
-PW_EXTERN_C void pw_TokenizerHandleEncodedMessage(
+PW_EXTERN_C void pw_tokenizer_HandleEncodedMessage(
const uint8_t encoded_message[], size_t size_bytes) {
PW_UNUSED(encoded_message[0]);
PW_UNUSED(size_bytes);
}
-PW_EXTERN_C void pw_TokenizerHandleEncodedMessageWithPayload(
- pw_TokenizerPayload payload,
+PW_EXTERN_C void pw_tokenizer_HandleEncodedMessageWithPayload(
+ pw_tokenizer_Payload payload,
const uint8_t encoded_message[],
size_t size_bytes) {
PW_UNUSED(payload);
diff --git a/pw_tokenizer/tokenize_to_global_handler.cc b/pw_tokenizer/tokenize_to_global_handler.cc
index ecca3b4..da36a49 100644
--- a/pw_tokenizer/tokenize_to_global_handler.cc
+++ b/pw_tokenizer/tokenize_to_global_handler.cc
@@ -19,9 +19,9 @@
namespace pw {
namespace tokenizer {
-extern "C" void _pw_TokenizeToGlobalHandler(pw_TokenizerStringToken token,
- pw_TokenizerArgTypes types,
- ...) {
+extern "C" void _pw_tokenizer_ToGlobalHandler(pw_tokenizer_Token token,
+ _pw_tokenizer_ArgTypes types,
+ ...) {
EncodedMessage encoded;
encoded.token = token;
@@ -30,8 +30,8 @@
const size_t encoded_bytes = EncodeArgs(types, args, encoded.args);
va_end(args);
- pw_TokenizerHandleEncodedMessage(reinterpret_cast<const uint8_t*>(&encoded),
- sizeof(encoded.token) + encoded_bytes);
+ pw_tokenizer_HandleEncodedMessage(reinterpret_cast<const uint8_t*>(&encoded),
+ sizeof(encoded.token) + encoded_bytes);
}
} // namespace tokenizer
diff --git a/pw_tokenizer/tokenize_to_global_handler_with_payload.cc b/pw_tokenizer/tokenize_to_global_handler_with_payload.cc
index b04e549..56b6520 100644
--- a/pw_tokenizer/tokenize_to_global_handler_with_payload.cc
+++ b/pw_tokenizer/tokenize_to_global_handler_with_payload.cc
@@ -19,10 +19,10 @@
namespace pw {
namespace tokenizer {
-extern "C" void _pw_TokenizeToGlobalHandlerWithPayload(
- const pw_TokenizerPayload payload,
- pw_TokenizerStringToken token,
- pw_TokenizerArgTypes types,
+extern "C" void _pw_tokenizer_ToGlobalHandlerWithPayload(
+ const pw_tokenizer_Payload payload,
+ pw_tokenizer_Token token,
+ _pw_tokenizer_ArgTypes types,
...) {
EncodedMessage encoded;
encoded.token = token;
@@ -32,7 +32,7 @@
const size_t encoded_bytes = EncodeArgs(types, args, encoded.args);
va_end(args);
- pw_TokenizerHandleEncodedMessageWithPayload(
+ pw_tokenizer_HandleEncodedMessageWithPayload(
payload,
reinterpret_cast<const uint8_t*>(&encoded),
sizeof(encoded.token) + encoded_bytes);