pw_tokenizer: Optional tokenization without args

Allows optionally tokenized fields in protos to be detokenized even
when the encoded message is missing its arguments. If the token is
found in the database, the format string itself is used as the
decoded value instead of leaving the field as raw bytes.
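
As a rough usage sketch (the database path, proto import, and the
token-to-format-string mapping below are placeholders mirroring the
updated test), a field that carries only a token with no argument
bytes now decodes to the format string itself:

    from pw_tokenizer import detokenize
    from pw_tokenizer.proto import detokenize_fields

    # Placeholder: generated module for a proto with an optionally
    # tokenized bytes field, as used in detokenize_proto_test.py.
    from detokenize_proto_test_pb2 import TheMessage

    # Placeholder token database; assumed to map token 0xAABBCCDD to
    # the format string "Luke, we're gonna have %s".
    detokenizer = detokenize.Detokenizer('tokens.csv')

    # The field holds only the little-endian token, no argument bytes.
    proto = TheMessage(message=b'\xDD\xCC\xBB\xAA')

    detokenize_fields(detokenizer, proto)
    assert proto.message == b"Luke, we're gonna have %s"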

Change-Id: Iaf977abfb9fe6077871d49fd96ba225724bde422
Reviewed-on: https://pigweed-review.googlesource.com/c/pigweed/pigweed/+/80840
Pigweed-Auto-Submit: Armando Montanez <amontanez@google.com>
Reviewed-by: Wyatt Hepler <hepler@google.com>
Commit-Queue: Auto-Submit <auto-submit@pigweed.google.com.iam.gserviceaccount.com>
diff --git a/pw_tokenizer/py/detokenize_proto_test.py b/pw_tokenizer/py/detokenize_proto_test.py
index c671799..1421dbf 100644
--- a/pw_tokenizer/py/detokenize_proto_test.py
+++ b/pw_tokenizer/py/detokenize_proto_test.py
@@ -42,6 +42,11 @@
         detokenize_fields(_DETOKENIZER, proto)
         self.assertEqual(proto.message, b"Luke, we're gonna have company")
 
+    def test_binary_missing_arguments(self) -> None:
+        proto = TheMessage(message=b'\xDD\xCC\xBB\xAA')
+        detokenize_fields(_DETOKENIZER, proto)
+        self.assertEqual(proto.message, b"Luke, we're gonna have %s")
+
     def test_recursive_binary(self) -> None:
         proto = TheMessage(message=b'\x78\x56\x34\x12')
         detokenize_fields(_DETOKENIZER, proto)
diff --git a/pw_tokenizer/py/pw_tokenizer/proto/__init__.py b/pw_tokenizer/py/pw_tokenizer/proto/__init__.py
index 6c52666..2dd7237 100644
--- a/pw_tokenizer/py/pw_tokenizer/proto/__init__.py
+++ b/pw_tokenizer/py/pw_tokenizer/proto/__init__.py
@@ -36,7 +36,7 @@
     """Decodes data that may be plain text or binary / Base64 tokenized text."""
     # Try detokenizing as binary.
     result = detokenizer.detokenize(data)
-    if result.ok():
+    if result.best_result() is not None:
         # Rather than just returning the detokenized string, continue
         # detokenization in case recursive Base64 detokenization is needed.
         data = str(result).encode()
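
The behavioral difference this change relies on (semantics hedged,
based on what the tests exercise): ok() is only true when the token is
found and its arguments decode successfully, while best_result() still
returns the best-matching entry when the token is known but its
arguments are missing, and None only when the token is not in the
database. A minimal sketch:

    from pw_tokenizer import detokenize

    # Placeholder database path; assumed to contain token 0xAABBCCDD
    # mapped to "Luke, we're gonna have %s".
    detokenizer = detokenize.Detokenizer('tokens.csv')

    # Token only, with the argument for %s omitted.
    result = detokenizer.detokenize(b'\xDD\xCC\xBB\xAA')

    if result.best_result() is not None:
        # A missing argument no longer aborts detokenization; str()
        # yields the unfilled format string.
        print(str(result))  # Luke, we're gonna have %s
    else:
        # Token not found in the database; leave the data unchanged.
        pass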