pw_tokenizer: Deprecate tokenizer buffer size config

pw_tokenizer no longer has any macros that stack-allocate a tokenization
buffer, so it does not need a config macro for this buffer size.

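As a consequence, pw::tokenizer::EncodedMessage no longer has a default
size. A minimal sketch of a handler that sizes it explicitly (the handler
name, the 64-byte size, and the SendEncodedLog sink are illustrative only):

  #include <cstdarg>
  #include <cstdint>

  #include "pw_tokenizer/encode_args.h"

  extern "C" void HandleTokenizedLog(uint32_t token,
                                     pw_tokenizer_ArgTypes types,
                                     ...) {
    va_list args;
    va_start(args, types);
    // The buffer size is now always explicit; 64 bytes is an arbitrary example.
    pw::tokenizer::EncodedMessage<64> encoded_message(token, types, args);
    va_end(args);
    SendEncodedLog(encoded_message);  // EncodedMessage converts to span
  }
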
- Create a new config macro, PW_LOG_TOKENIZED_ENCODING_BUFFER_SIZE_BYTES,
  for pw_log_tokenized's stack-allocated encoding buffer. It defaults to
  PW_TOKENIZER_CFG_ENCODING_BUFFER_SIZE_BYTES for backwards compatibility.
- Remove kDefaultBase64EncodedBufferSize from pw_tokenizer, since
  pw_tokenizer no longer defines a default encoding buffer size.
- Introduce pw_log_tokenized/base64.h, which provides a function for
  encoding the pw_log_tokenized buffer as Base64 (see the sketch after
  this list).
- Switch base64_over_hdlc and pw_log_zephyr to use pw::InlineString for
  Base64 encoding.

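A minimal usage sketch for the new pw_log_tokenized/base64.h helper,
mirroring the updated base64_over_hdlc.cc backend (the function name and
the WriteToTransport sink below are hypothetical, for illustration only):

  #include <cstddef>
  #include <cstdint>

  #include "pw_log_tokenized/base64.h"

  void HandleLogBuffer(const uint8_t log_buffer[], size_t size_bytes) {
    // Returns a pw::InlineString sized for a kEncodingBufferSizeBytes message.
    const auto base64_string =
        pw::log_tokenized::PrefixedBase64Encode(log_buffer, size_bytes);
    WriteToTransport(base64_string);  // hypothetical transport call
  }
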
Change-Id: I0205598da51150ba31001889bd2d822329261887
Reviewed-on: https://pigweed-review.googlesource.com/c/pigweed/pigweed/+/163257
Pigweed-Auto-Submit: Wyatt Hepler <hepler@google.com>
Reviewed-by: Erik Gilling <konkers@google.com>
Commit-Queue: Auto-Submit <auto-submit@pigweed-service-accounts.iam.gserviceaccount.com>
diff --git a/docs/BUILD.gn b/docs/BUILD.gn
index 117dd1c..582e3ca 100644
--- a/docs/BUILD.gn
+++ b/docs/BUILD.gn
@@ -147,6 +147,7 @@
   "$dir_pw_i2c_linux/public/pw_i2c_linux/initiator.h",
   "$dir_pw_log_tokenized/public/pw_log_tokenized/handler.h",
   "$dir_pw_log_tokenized/public/pw_log_tokenized/metadata.h",
+  "$dir_pw_log_tokenized/public/pw_log_tokenized/base64.h",
   "$dir_pw_protobuf/public/pw_protobuf/find.h",
   "$dir_pw_random/public/pw_random/random.h",
   "$dir_pw_rpc/public/pw_rpc/internal/config.h",
diff --git a/docs/Doxyfile b/docs/Doxyfile
index 8132a75..8b7d237 100644
--- a/docs/Doxyfile
+++ b/docs/Doxyfile
@@ -2391,7 +2391,8 @@
                          PW_EXTERN_C_START= \
                          PW_LOCKS_EXCLUDED(...)= \
                          PW_EXCLUSIVE_LOCKS_REQUIRED(...)= \
-                         PW_GUARDED_BY(...)=
+                         PW_GUARDED_BY(...)= \
+                         PW_EXCLUDE_FROM_DOXYGEN=1
 
 # If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
 # tag can be used to specify a list of macro names that should be expanded. The
diff --git a/pw_log_tokenized/BUILD.bazel b/pw_log_tokenized/BUILD.bazel
index 8c65bd8..855a8f4 100644
--- a/pw_log_tokenized/BUILD.bazel
+++ b/pw_log_tokenized/BUILD.bazel
@@ -96,11 +96,20 @@
 )
 
 pw_cc_library(
-    name = "base64_over_hdlc",
-    srcs = ["base64_over_hdlc.cc"],
-    hdrs = ["public/pw_log_tokenized/base64_over_hdlc.h"],
+    name = "base64",
+    hdrs = ["public/pw_log_tokenized/base64.h"],
     includes = ["public"],
     deps = [
+        ":headers",  # Only config.h is needed
+        "//pw_tokenizer:base64",
+    ],
+)
+
+pw_cc_library(
+    name = "base64_over_hdlc",
+    srcs = ["base64_over_hdlc.cc"],
+    deps = [
+        ":base64",
         ":handler_facade",
         "//pw_hdlc",
         "//pw_stream:sys_io_stream",
@@ -116,6 +125,7 @@
         "pw_log_tokenized_private/test_utils.h",
     ],
     deps = [
+        ":base64",
         ":headers",
         "//pw_unit_test",
     ],
diff --git a/pw_log_tokenized/BUILD.gn b/pw_log_tokenized/BUILD.gn
index 17acf3a..1e98a9d 100644
--- a/pw_log_tokenized/BUILD.gn
+++ b/pw_log_tokenized/BUILD.gn
@@ -106,6 +106,7 @@
   public_configs = [ ":public_include_path" ]
   public_deps = [
     "$dir_pw_log:facade",
+    "$dir_pw_tokenizer:config",
     pw_log_tokenized_CONFIG,
   ]
   public = [ "public/pw_log_tokenized/config.h" ]
@@ -122,13 +123,21 @@
   }
 }
 
+pw_source_set("base64") {
+  public_configs = [ ":public_include_path" ]
+  public = [ "public/pw_log_tokenized/base64.h" ]
+  public_deps = [
+    ":config",
+    "$dir_pw_tokenizer:base64",
+  ]
+}
+
 # This target provides a backend for pw_tokenizer that encodes tokenized logs as
 # Base64, encodes them into HDLC frames, and writes them over sys_io.
 pw_source_set("base64_over_hdlc") {
-  public_configs = [ ":public_include_path" ]
-  public = [ "public/pw_log_tokenized/base64_over_hdlc.h" ]
   sources = [ "base64_over_hdlc.cc" ]
   deps = [
+    ":base64",
     ":handler.facade",
     "$dir_pw_hdlc:encoder",
     "$dir_pw_stream:sys_io_stream",
@@ -151,6 +160,7 @@
     "pw_log_tokenized_private/test_utils.h",
   ]
   deps = [
+    ":base64",
     ":headers",
     dir_pw_preprocessor,
   ]
diff --git a/pw_log_tokenized/CMakeLists.txt b/pw_log_tokenized/CMakeLists.txt
index 6397a67..69b9690 100644
--- a/pw_log_tokenized/CMakeLists.txt
+++ b/pw_log_tokenized/CMakeLists.txt
@@ -24,6 +24,7 @@
     public
   PUBLIC_DEPS
     pw_log.facade
+    pw_tokenizer.config
     ${pw_log_tokenized_CONFIG}
 )
 
@@ -86,21 +87,27 @@
     pw_preprocessor
 )
 
+pw_add_library(pw_log_tokenized.base64 INTERFACE
+  HEADERS
+    public/pw_log_tokenized/base64.h
+  PUBLIC_INCLUDES
+    public
+  PUBLIC_DEPS
+    pw_log_tokenized.config
+    pw_tokenizer.base64
+)
+
 # This target provides a backend for pw_tokenizer that encodes tokenized logs as
 # Base64, encodes them into HDLC frames, and writes them over sys_io.
 pw_add_library(pw_log_tokenized.base64_over_hdlc STATIC
-  HEADERS
-    public/pw_log_tokenized/base64_over_hdlc.h
-  PUBLIC_INCLUDES
-    public
   SOURCES
     base64_over_hdlc.cc
   PRIVATE_DEPS
     pw_hdlc.encoder
+    pw_log_tokenized.base64
     pw_log_tokenized.handler
     pw_span
     pw_stream.sys_io_stream
-    pw_tokenizer.base64
 )
 
 pw_add_test(pw_log_tokenized.log_tokenized_test
@@ -109,6 +116,7 @@
     log_tokenized_test_c.c
     pw_log_tokenized_private/test_utils.h
   PRIVATE_DEPS
+    pw_log_tokenized.base64
     pw_log_tokenized._headers
     pw_preprocessor
   GROUPS
diff --git a/pw_log_tokenized/base64_over_hdlc.cc b/pw_log_tokenized/base64_over_hdlc.cc
index 29e925a..997f6cf 100644
--- a/pw_log_tokenized/base64_over_hdlc.cc
+++ b/pw_log_tokenized/base64_over_hdlc.cc
@@ -15,17 +15,19 @@
 // This function serves as a backend for pw_tokenizer / pw_log_tokenized that
 // encodes tokenized logs as Base64 and writes them using HDLC.
 
-#include "pw_log_tokenized/base64_over_hdlc.h"
-
 #include "pw_hdlc/encoder.h"
+#include "pw_log_tokenized/base64.h"
 #include "pw_log_tokenized/handler.h"
 #include "pw_span/span.h"
 #include "pw_stream/sys_io_stream.h"
+#include "pw_string/string.h"
 #include "pw_tokenizer/base64.h"
 
 namespace pw::log_tokenized {
 namespace {
 
+inline constexpr int kBase64LogHdlcAddress = 1;
+
 stream::SysIoWriter writer;
 
 }  // namespace
@@ -36,15 +38,12 @@
     const uint8_t log_buffer[],
     size_t size_bytes) {
   // Encode the tokenized message as Base64.
-  char base64_buffer[tokenizer::kDefaultBase64EncodedBufferSize];
-  const size_t base64_bytes = tokenizer::PrefixedBase64Encode(
-      span(log_buffer, size_bytes), base64_buffer);
-  base64_buffer[base64_bytes] = '\0';
+  const pw::InlineBasicString base64_string =
+      PrefixedBase64Encode(log_buffer, size_bytes);
 
   // HDLC-encode the Base64 string via a SysIoWriter.
-  hdlc::WriteUIFrame(PW_LOG_TOKENIZED_BASE64_LOG_HDLC_ADDRESS,
-                     as_bytes(span(base64_buffer, base64_bytes)),
-                     writer);
+  hdlc::WriteUIFrame(
+      kBase64LogHdlcAddress, as_bytes(span(base64_string)), writer);
 }
 
 }  // namespace pw::log_tokenized
diff --git a/pw_log_tokenized/docs.rst b/pw_log_tokenized/docs.rst
index 77401dd..cd5a105 100644
--- a/pw_log_tokenized/docs.rst
+++ b/pw_log_tokenized/docs.rst
@@ -175,6 +175,11 @@
         token_buffer.size());
   }
 
+The binary tokenized message may be encoded in the :ref:`prefixed Base64 format
+<module-pw_tokenizer-base64-format>` with the following function:
+
+.. doxygenfunction:: PrefixedBase64Encode(span<const std::byte>)
+
 Build targets
 -------------
 The GN build for ``pw_log_tokenized`` has two targets: ``pw_log_tokenized`` and
diff --git a/pw_log_tokenized/log_tokenized.cc b/pw_log_tokenized/log_tokenized.cc
index 25c00d9..6a1ba8c 100644
--- a/pw_log_tokenized/log_tokenized.cc
+++ b/pw_log_tokenized/log_tokenized.cc
@@ -16,6 +16,7 @@
 
 #include <cstdarg>
 
+#include "pw_log_tokenized/config.h"
 #include "pw_log_tokenized/handler.h"
 #include "pw_tokenizer/encode_args.h"
 
@@ -26,7 +27,8 @@
     ...) {
   va_list args;
   va_start(args, types);
-  pw::tokenizer::EncodedMessage<> encoded_message(token, types, args);
+  pw::tokenizer::EncodedMessage<PW_LOG_TOKENIZED_ENCODING_BUFFER_SIZE_BYTES>
+      encoded_message(token, types, args);
   va_end(args);
 
   pw_log_tokenized_HandleLog(
diff --git a/pw_log_tokenized/log_tokenized_test.cc b/pw_log_tokenized/log_tokenized_test.cc
index a8d4759..51c90e3 100644
--- a/pw_log_tokenized/log_tokenized_test.cc
+++ b/pw_log_tokenized/log_tokenized_test.cc
@@ -28,6 +28,7 @@
 #include "pw_log_tokenized/log_tokenized.h"
 
 #include "gtest/gtest.h"
+#include "pw_log_tokenized/base64.h"
 #include "pw_log_tokenized_private/test_utils.h"
 
 namespace pw::log_tokenized {
@@ -44,6 +45,22 @@
     PW_TOKENIZER_STRING_TOKEN(PW_LOG_MODULE_NAME) &
     ((1u << PW_LOG_TOKENIZED_MODULE_BITS) - 1);
 
+TEST(LogTokenized, Base64) {
+  constexpr uint8_t kBinary[6]{1, 2, 3, 4, 5, 6};
+  constexpr const char* kBase64Expected = "$AQIDBAUG";  // calculated in Python
+
+  InlineBasicString result_1 = PrefixedBase64Encode(as_bytes(span(kBinary)));
+  EXPECT_EQ(result_1, kBase64Expected);
+  EXPECT_EQ(result_1.capacity(), kBase64EncodedBufferSizeBytes);
+
+  InlineBasicString result_2 = PrefixedBase64Encode(kBinary, sizeof(kBinary));
+  EXPECT_EQ(result_2, kBase64Expected);
+
+  InlineBasicString result_3 = PrefixedBase64Encode(
+      reinterpret_cast<const std::byte*>(kBinary), sizeof(kBinary));
+  EXPECT_EQ(result_3, kBase64Expected);
+}
+
 TEST(LogTokenized, LogMetadata_LevelTooLarge_Clamps) {
   auto check_metadata = [] {
     Metadata metadata = Metadata(last_log.metadata);
diff --git a/pw_log_tokenized/public/pw_log_tokenized/base64.h b/pw_log_tokenized/public/pw_log_tokenized/base64.h
new file mode 100644
index 0000000..89e7a19
--- /dev/null
+++ b/pw_log_tokenized/public/pw_log_tokenized/base64.h
@@ -0,0 +1,51 @@
+// Copyright 2023 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+#pragma once
+
+#include <cstddef>
+#include <type_traits>
+
+#include "pw_log_tokenized/config.h"
+#include "pw_tokenizer/base64.h"
+
+namespace pw::log_tokenized {
+
+// Minimum capacity for a string to hold the Base64-encoded version of a
+// PW_LOG_TOKENIZED_ENCODING_BUFFER_SIZE_BYTES-byte tokenized message. This is
+// the capacity needed to encode to a `pw::InlineString` and does not include
+// a null terminator.
+inline constexpr size_t kBase64EncodedBufferSizeBytes =
+    tokenizer::Base64EncodedBufferSize(kEncodingBufferSizeBytes);
+
+/// Encodes a binary tokenized log in the prefixed Base64 format. Calls
+/// @cpp_func{pw::tokenizer::PrefixedBase64Encode} for a string sized to fit a
+/// `kEncodingBufferSizeBytes` tokenized log.
+inline InlineString<kBase64EncodedBufferSizeBytes> PrefixedBase64Encode(
+    span<const std::byte> binary_message) {
+  return tokenizer::PrefixedBase64Encode<kEncodingBufferSizeBytes>(
+      binary_message);
+}
+
+#ifndef PW_EXCLUDE_FROM_DOXYGEN  // Doxygen fails to parse this, so skip it.
+
+template <typename T,
+          typename = std::enable_if_t<sizeof(T) == sizeof(std::byte)>>
+inline InlineString<kBase64EncodedBufferSizeBytes> PrefixedBase64Encode(
+    const T* log_buffer, size_t size_bytes) {
+  return PrefixedBase64Encode(as_bytes(span(log_buffer, size_bytes)));
+}
+
+#endif  // PW_EXCLUDE_FROM_DOXYGEN
+
+}  // namespace pw::log_tokenized
diff --git a/pw_log_tokenized/public/pw_log_tokenized/base64_over_hdlc.h b/pw_log_tokenized/public/pw_log_tokenized/base64_over_hdlc.h
deleted file mode 100644
index af31532..0000000
--- a/pw_log_tokenized/public/pw_log_tokenized/base64_over_hdlc.h
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright 2020 The Pigweed Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-//     https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-#pragma once
-
-// The HDLC address to which to write Base64-encoded tokenized logs.
-#ifndef PW_LOG_TOKENIZED_BASE64_LOG_HDLC_ADDRESS
-#define PW_LOG_TOKENIZED_BASE64_LOG_HDLC_ADDRESS 1
-#endif  // PW_LOG_TOKENIZED_BASE64_LOG_HDLC_ADDRESS
diff --git a/pw_log_tokenized/public/pw_log_tokenized/config.h b/pw_log_tokenized/public/pw_log_tokenized/config.h
index 6475981..c1060b5 100644
--- a/pw_log_tokenized/public/pw_log_tokenized/config.h
+++ b/pw_log_tokenized/public/pw_log_tokenized/config.h
@@ -17,6 +17,22 @@
 
 #include "pw_log/levels.h"
 #include "pw_log/options.h"
+#include "pw_tokenizer/config.h"
+
+// The size of the stack-allocated argument encoding buffer to use by default.
+// A buffer of this size is allocated and used for the 4-byte token and for
+// encoding all arguments. It must be at least large enough for the token (4
+// bytes).
+//
+// This buffer does not need to be large to accommodate a good number of
+// tokenized string arguments. Integer arguments are usually encoded smaller
+// than their native size (e.g. 1 or 2 bytes for smaller numbers). All floating
+// point types are encoded as four bytes. Null-terminated strings are encoded
+// 1:1 in size, however, and can quickly fill up this buffer.
+#ifndef PW_LOG_TOKENIZED_ENCODING_BUFFER_SIZE_BYTES
+#define PW_LOG_TOKENIZED_ENCODING_BUFFER_SIZE_BYTES \
+  PW_TOKENIZER_CFG_ENCODING_BUFFER_SIZE_BYTES
+#endif  // PW_LOG_TOKENIZED_ENCODING_BUFFER_SIZE_BYTES
 
 // This macro takes the PW_LOG format string and optionally transforms it. By
 // default, pw_log_tokenized specifies three fields as key-value pairs.
@@ -68,3 +84,17 @@
 static_assert((PW_LOG_TOKENIZED_LEVEL_BITS + PW_LOG_TOKENIZED_LINE_BITS +
                PW_LOG_TOKENIZED_FLAG_BITS + PW_LOG_TOKENIZED_MODULE_BITS) == 32,
               "Log metadata fields must use 32 bits");
+
+#ifdef __cplusplus
+
+#include <cstddef>
+
+namespace pw::log_tokenized {
+
+// C++ constant for the encoding buffer size. Use this instead of the macro.
+inline constexpr size_t kEncodingBufferSizeBytes =
+    PW_LOG_TOKENIZED_ENCODING_BUFFER_SIZE_BYTES;
+
+}  // namespace pw::log_tokenized
+
+#endif  // __cplusplus
diff --git a/pw_log_zephyr/CMakeLists.txt b/pw_log_zephyr/CMakeLists.txt
index 408f184..94e6444 100644
--- a/pw_log_zephyr/CMakeLists.txt
+++ b/pw_log_zephyr/CMakeLists.txt
@@ -40,7 +40,7 @@
   SOURCES
     pw_log_zephyr_tokenized_handler.cc
   PUBLIC_DEPS
-    pw_log_tokenized.config
+    pw_log_tokenized.base64
     pw_log_tokenized.handler
     pw_span
     pw_sync.interrupt_spin_lock
diff --git a/pw_log_zephyr/pw_log_zephyr_tokenized_handler.cc b/pw_log_zephyr/pw_log_zephyr_tokenized_handler.cc
index eeef915..8e4fd3c 100644
--- a/pw_log_zephyr/pw_log_zephyr_tokenized_handler.cc
+++ b/pw_log_zephyr/pw_log_zephyr_tokenized_handler.cc
@@ -39,8 +39,7 @@
 
   // Encode the tokenized message as Base64.
   const InlineBasicString base64_string =
-      tokenizer::PrefixedBase64Encode<log_tokenized::kEncodingBufferSizeBytes>(
-          span(log_buffer, size_bytes));
+      log_tokenized::PrefixedBase64Encode(log_buffer, size_bytes);
 
   if (base64_string.empty()) {
     return;
diff --git a/pw_tokenizer/base64_test.cc b/pw_tokenizer/base64_test.cc
index fee5a67..babc875 100644
--- a/pw_tokenizer/base64_test.cc
+++ b/pw_tokenizer/base64_test.cc
@@ -97,7 +97,7 @@
 
 TEST_F(PrefixedBase64, Encode_InlineString) {
   for (auto& [binary, base64] : kTestData) {
-    EXPECT_EQ(base64, PrefixedBase64Encode(binary));
+    EXPECT_EQ(base64, PrefixedBase64Encode<64>(binary));
   }
 }
 
diff --git a/pw_tokenizer/guides.rst b/pw_tokenizer/guides.rst
index 297d022..91761c8 100644
--- a/pw_tokenizer/guides.rst
+++ b/pw_tokenizer/guides.rst
@@ -331,7 +331,7 @@
                                           ...) {
      va_list args;
      va_start(args, types);
-     pw::tokenizer::EncodedMessage<> encoded_message(token, types, args);
+     pw::tokenizer::EncodedMessage<kLogBufferSize> encoded_message(token, types, args);
      va_end(args);
 
      HandleTokenizedMessage(metadata, encoded_message);
diff --git a/pw_tokenizer/public/pw_tokenizer/base64.h b/pw_tokenizer/public/pw_tokenizer/base64.h
index 36acaf2..5cca014 100644
--- a/pw_tokenizer/public/pw_tokenizer/base64.h
+++ b/pw_tokenizer/public/pw_tokenizer/base64.h
@@ -91,11 +91,6 @@
   return Base64EncodedStringSize(message_size) + sizeof('\0');
 }
 
-// The minimum buffer size that can hold a tokenized message that is
-// PW_TOKENIZER_CFG_ENCODING_BUFFER_SIZE_BYTES long encoded as prefixed Base64.
-inline constexpr size_t kDefaultBase64EncodedBufferSize =
-    Base64EncodedBufferSize(PW_TOKENIZER_CFG_ENCODING_BUFFER_SIZE_BYTES);
-
 // Encodes a binary tokenized message as prefixed Base64 with a null terminator.
 // Returns the encoded string length (excluding the null terminator). Returns 0
 // if the buffer is too small. Always null terminates if the output buffer is
@@ -128,8 +123,7 @@
 // Encodes a binary tokenized message as prefixed Base64 to a pw::InlineString.
 // The pw::InlineString is sized to fit messages up to
 // kMaxBinaryMessageSizeBytes long. Asserts if the message is larger.
-template <size_t kMaxBinaryMessageSizeBytes =
-              PW_TOKENIZER_CFG_ENCODING_BUFFER_SIZE_BYTES>
+template <size_t kMaxBinaryMessageSizeBytes>
 auto PrefixedBase64Encode(span<const std::byte> binary_message) {
   static_assert(kMaxBinaryMessageSizeBytes >= 1, "Messages cannot be empty");
   InlineString<Base64EncodedStringSize(kMaxBinaryMessageSizeBytes)> string(
@@ -138,8 +132,7 @@
   return string;
 }
 
-template <size_t kMaxBinaryMessageSizeBytes =
-              PW_TOKENIZER_CFG_ENCODING_BUFFER_SIZE_BYTES>
+template <size_t kMaxBinaryMessageSizeBytes>
 auto PrefixedBase64Encode(span<const uint8_t> binary_message) {
   return PrefixedBase64Encode<kMaxBinaryMessageSizeBytes>(
       as_bytes(binary_message));
diff --git a/pw_tokenizer/public/pw_tokenizer/config.h b/pw_tokenizer/public/pw_tokenizer/config.h
index 7a1ff27..b9d9fa7 100644
--- a/pw_tokenizer/public/pw_tokenizer/config.h
+++ b/pw_tokenizer/public/pw_tokenizer/config.h
@@ -53,17 +53,10 @@
 #define PW_TOKENIZER_CFG_C_HASH_LENGTH 128
 #endif  // PW_TOKENIZER_CFG_C_HASH_LENGTH
 
-// The size of the stack-allocated argument encoding buffer to use by default.
-// This only affects tokenization macros that use the
-// pw::tokenizer::EncodedMessage class. A buffer of this size is allocated and
-// used for the 4-byte token and for encoding all arguments. It must be at least
-// large enough for the token (4 bytes).
-//
-// This buffer does not need to be large to accommodate a good number of
-// tokenized string arguments. Integer arguments are usually encoded smaller
-// than their native size (e.g. 1 or 2 bytes for smaller numbers). All floating
-// point types are encoded as four bytes. Null-terminated strings are encoded
-// 1:1 in size.
+// PW_TOKENIZER_CFG_ENCODING_BUFFER_SIZE_BYTES is deprecated. It is used as the
+// default value for pw_log_tokenized's
+// PW_LOG_TOKENIZED_ENCODING_BUFFER_SIZE_BYTES. This value should not be
+// configured; set PW_LOG_TOKENIZED_ENCODING_BUFFER_SIZE_BYTES instead.
 #ifndef PW_TOKENIZER_CFG_ENCODING_BUFFER_SIZE_BYTES
 #define PW_TOKENIZER_CFG_ENCODING_BUFFER_SIZE_BYTES 52
 #endif  // PW_TOKENIZER_CFG_ENCODING_BUFFER_SIZE_BYTES
diff --git a/pw_tokenizer/public/pw_tokenizer/encode_args.h b/pw_tokenizer/public/pw_tokenizer/encode_args.h
index 07e6e31..3d7a219 100644
--- a/pw_tokenizer/public/pw_tokenizer/encode_args.h
+++ b/pw_tokenizer/public/pw_tokenizer/encode_args.h
@@ -82,10 +82,8 @@
                   va_list args,
                   span<std::byte> output);
 
-/// Encodes a tokenized message to a fixed size buffer. By default, the buffer
-/// size is set by the @c_macro{PW_TOKENIZER_CFG_ENCODING_BUFFER_SIZE_BYTES}
-/// config macro. This class is used to encode tokenized messages passed in from
-/// tokenization macros.
+/// Encodes a tokenized message to a fixed size buffer. This class is used to
+/// encode tokenized messages passed in from tokenization macros.
 ///
 /// To use `pw::tokenizer::EncodedMessage`, construct it with the token,
 /// argument types, and `va_list` from the variadic arguments:
@@ -104,7 +102,7 @@
 ///     SendLogMessage(encoded_message);  // EncodedMessage converts to span
 ///   }
 /// @endcode
-template <size_t kMaxSizeBytes = PW_TOKENIZER_CFG_ENCODING_BUFFER_SIZE_BYTES>
+template <size_t kMaxSizeBytes>
 class EncodedMessage {
  public:
   // Encodes a tokenized message to an internal buffer.