pw_tokenizer: Allow database updates from multiple toolchains

Support updating token database files from multiple toolchains.
Previously, a toolchain-independent stamp file was used to prevent
concurrent updates, which made it an error to update the database from
more than one toolchain. That restriction was unnecessary and resulted
in hard-to-debug Ninja errors. Now, a GN pool with depth 1 serializes
access to the database file, so updates from multiple toolchains are
safe.
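
As a rough sketch of the new pattern (target names here are
illustrative; the real names are derived from the template in the diff
below, and inputs/arguments are elided for brevity):

  # A pool of depth 1 allows only one action assigned to it to run at a
  # time, regardless of which toolchain instantiated the action.
  pool("token_database._pool") {
    depth = 1
  }

  # The update action joins the pool, so concurrent updates of the same
  # database file are serialized instead of being a build error.
  pw_python_action("token_database") {
    script = "$dir_pw_tokenizer/py/pw_tokenizer/database.py"
    pool = ":token_database._pool"
    args = [ "add" ]  # remaining arguments and inputs elided
    stamp = true
  }

With stamp = true, each toolchain writes its own stamp file in its own
output directory, so there is no duplicate-output error; the pool alone
serializes the actual database update.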

Change-Id: Icb853369ebe36d3e7bc57f31417b779afebb4d12
Reviewed-on: https://pigweed-review.googlesource.com/c/pigweed/pigweed/+/23960
Commit-Queue: Anthony DiGirolamo <tonymd@google.com>
Commit-Queue: Keir Mierle <keir@google.com>
Reviewed-by: Anthony DiGirolamo <tonymd@google.com>
Reviewed-by: Keir Mierle <keir@google.com>
diff --git a/pw_tokenizer/database.gni b/pw_tokenizer/database.gni
index a1261d0..8b7b894 100644
--- a/pw_tokenizer/database.gni
+++ b/pw_tokenizer/database.gni
@@ -87,20 +87,22 @@
     not_needed([ "_domain" ])
   }
 
+  # Restrict parallelism for updating this database file to one thread. This
+  # makes it safe to update it from multiple toolchains.
+  pool("$target_name._pool") {
+    depth = 1
+  }
+
   pw_python_action(target_name) {
     script = "$dir_pw_tokenizer/py/pw_tokenizer/database.py"
+    pool = ":$target_name._pool"
 
     inputs = _input_databases
 
     if (_create == "") {
       args = [ "add" ]
       inputs += [ _database ]
-
-      # Since the output file is in the source tree, create a corresponding
-      # stamp file in the output directory that is independent of the toolchain.
-      # That way, updating the database from multiple toolchains is an error.
-      stamp =
-          "$root_build_dir/" + rebase_path(invoker.database, "//") + ".update"
+      stamp = true
     } else {
       args = [
         "create",