pw_tokenizer: Make LoadTokenDatabases public

Change-Id: I3c4e9e316c92fe8abb90e21abb48566406d79c64
Reviewed-on: https://pigweed-review.googlesource.com/c/pigweed/pigweed/+/16860
Commit-Queue: Wyatt Hepler <hepler@google.com>
Reviewed-by: Joe Ethier <jethier@google.com>
diff --git a/pw_tokenizer/py/pw_tokenizer/database.py b/pw_tokenizer/py/pw_tokenizer/database.py
index 10563a1..f5ccf9c 100755
--- a/pw_tokenizer/py/pw_tokenizer/database.py
+++ b/pw_tokenizer/py/pw_tokenizer/database.py
@@ -260,8 +260,12 @@
                 _read_strings_from_elf(file, domain))
 
 
-class _LoadTokenDatabases(argparse.Action):
-    """Argparse action that reads tokenize databases from paths or globs."""
+class LoadTokenDatabases(argparse.Action):
+    """Argparse action that reads token databases from paths or globs.
+
+    ELF files may have #domain appended to them to specify a tokenization domain
+    other than the default.
+    """
     def __call__(self, parser, namespace, values, option_string=None):
         databases: List[tokens.Database] = []
         paths: Set[Path] = set()
@@ -295,7 +299,7 @@
         'databases',
         metavar='elf_or_token_database',
         nargs='+',
-        action=_LoadTokenDatabases,
+        action=LoadTokenDatabases,
         help=('ELF or token database files from which to read strings and '
               'tokens. For ELF files, the tokenization domain to read from '
               'may specified after the path as #domain_name (e.g. '
diff --git a/pw_trace_tokenized/py/pw_trace_tokenized/trace_tokenized.py b/pw_trace_tokenized/py/pw_trace_tokenized/trace_tokenized.py
index 917c06f..95bc4c1 100755
--- a/pw_trace_tokenized/py/pw_trace_tokenized/trace_tokenized.py
+++ b/pw_trace_tokenized/py/pw_trace_tokenized/trace_tokenized.py
@@ -171,7 +171,7 @@
     parser.add_argument(
         'databases',
         nargs='+',
-        action=database._LoadTokenDatabases,  # pylint: disable=protected-access
+        action=database.LoadTokenDatabases,
         help='Databases (ELF, binary, or CSV) to use to lookup tokens.')
     parser.add_argument(
         '-i',