pw_allocator: Move block allocators to separate files

This CL separates the individual block allocators into their own files
and build targets, ahead of the addition of a BucketBlockAllocator.
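
Downstream code can now depend on just the strategy it uses instead of
pulling in all five; the old block_allocator target and header remain
as forwarding shims until customers migrate (see the TODOs referencing
b/326509341). For illustration (the consumer code below is
hypothetical; `buffer` stands in for a caller-owned pw::ByteSpan), a
user that previously wrote

    #include "pw_allocator/block_allocator.h"  // Pulled in every strategy.

    // Construct a first-fit allocator over a caller-provided region.
    pw::allocator::FirstFitBlockAllocator<uint16_t> allocator(buffer);

would now include only the header for its strategy, and depend on the
matching build target (e.g. first_fit_block_allocator):

    #include "pw_allocator/first_fit_block_allocator.h"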

The most substantial part of this change was reworking the unit tests
to declare test cases explicitly instead of generating them with
macros.
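
For example, where the old block_allocator_test.cc stamped out one
test per strategy with TEST_ONE_STRATEGY/TEST_FOREACH_STRATEGY macros,
each per-strategy test file now declares a small fixture and lists
every case explicitly; condensed from best_fit_block_allocator_test.cc
in this change:

    class BestFitBlockAllocatorTest : public test::BlockAllocatorTest {
     public:
      BestFitBlockAllocatorTest() : BlockAllocatorTest(allocator_) {}

     private:
      BestFitBlockAllocator<test::BlockAllocatorTest::OffsetType> allocator_;
    };

    // Each TEST_F forwards to shared logic in the base fixture.
    TEST_F(BestFitBlockAllocatorTest, AllocateLarge) { AllocateLarge(); }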

Change-Id: I4f200eb3d3282de9bd3e367d4a0f95610e913a4b
Reviewed-on: https://pigweed-review.googlesource.com/c/pigweed/pigweed/+/198154
Reviewed-by: Taylor Cramer <cramertj@google.com>
Commit-Queue: Aaron Green <aarongreen@google.com>
diff --git a/docs/BUILD.gn b/docs/BUILD.gn
index 0d02947..6b7fd4a 100644
--- a/docs/BUILD.gn
+++ b/docs/BUILD.gn
@@ -125,16 +125,20 @@
   "$dir_pw_alignment/public/pw_alignment/alignment.h",
   "$dir_pw_allocator/public/pw_allocator/allocator.h",
   "$dir_pw_allocator/public/pw_allocator/allocator_as_pool.h",
+  "$dir_pw_allocator/public/pw_allocator/best_fit_block_allocator.h",
   "$dir_pw_allocator/public/pw_allocator/block.h",
-  "$dir_pw_allocator/public/pw_allocator/block_allocator.h",
+  "$dir_pw_allocator/public/pw_allocator/block_allocator_base.h",
   "$dir_pw_allocator/public/pw_allocator/buddy_allocator.h",
   "$dir_pw_allocator/public/pw_allocator/buffer.h",
   "$dir_pw_allocator/public/pw_allocator/bump_allocator.h",
   "$dir_pw_allocator/public/pw_allocator/capability.h",
   "$dir_pw_allocator/public/pw_allocator/chunk_pool.h",
   "$dir_pw_allocator/public/pw_allocator/deallocator.h",
+  "$dir_pw_allocator/public/pw_allocator/dual_first_fit_block_allocator.h",
   "$dir_pw_allocator/public/pw_allocator/fallback_allocator.h",
+  "$dir_pw_allocator/public/pw_allocator/first_fit_block_allocator.h",
   "$dir_pw_allocator/public/pw_allocator/fuzzing.h",
+  "$dir_pw_allocator/public/pw_allocator/last_fit_block_allocator.h",
   "$dir_pw_allocator/public/pw_allocator/layout.h",
   "$dir_pw_allocator/public/pw_allocator/libc_allocator.h",
   "$dir_pw_allocator/public/pw_allocator/metrics.h",
@@ -147,6 +151,7 @@
   "$dir_pw_allocator/public/pw_allocator/tracking_allocator.h",
   "$dir_pw_allocator/public/pw_allocator/typed_pool.h",
   "$dir_pw_allocator/public/pw_allocator/unique_ptr.h",
+  "$dir_pw_allocator/public/pw_allocator/worst_fit_block_allocator.h",
   "$dir_pw_analog/public/pw_analog/analog_input.h",
   "$dir_pw_analog/public/pw_analog/microvolt_input.h",
   "$dir_pw_async/public/pw_async/context.h",
diff --git a/pw_allocator/BUILD.bazel b/pw_allocator/BUILD.bazel
index d5e2ea3..fd1735e 100644
--- a/pw_allocator/BUILD.bazel
+++ b/pw_allocator/BUILD.bazel
@@ -55,6 +55,13 @@
 )
 
 cc_library(
+    name = "best_fit_block_allocator",
+    hdrs = ["public/pw_allocator/best_fit_block_allocator.h"],
+    includes = ["public"],
+    deps = [":block_allocator_base"],
+)
+
+cc_library(
     name = "block",
     srcs = ["block.cc"],
     hdrs = [
@@ -72,18 +79,31 @@
     ],
 )
 
+# TODO(b/326509341): Remove when all customers depend on the correct targets.
 cc_library(
     name = "block_allocator",
-    srcs = ["block_allocator.cc"],
-    hdrs = [
-        "public/pw_allocator/block_allocator.h",
+    hdrs = ["public/pw_allocator/block_allocator.h"],
+    includes = ["public"],
+    deps = [
+        ":best_fit_block_allocator",
+        ":dual_first_fit_block_allocator",
+        ":first_fit_block_allocator",
+        ":last_fit_block_allocator",
+        ":worst_fit_block_allocator",
     ],
+)
+
+# TODO(b/326509341): Rename when all customers depend on the correct targets.
+cc_library(
+    name = "block_allocator_base",
+    srcs = ["block_allocator.cc"],
+    hdrs = ["public/pw_allocator/block_allocator_base.h"],
+    includes = ["public"],
     deps = [
         ":allocator",
         ":block",
-        ":buffer",
         "//pw_assert",
-        "//pw_bytes",
+        "//pw_bytes:alignment",
         "//pw_result",
         "//pw_status",
     ],
@@ -198,6 +218,13 @@
 )
 
 cc_library(
+    name = "dual_first_fit_block_allocator",
+    hdrs = ["public/pw_allocator/dual_first_fit_block_allocator.h"],
+    includes = ["public"],
+    deps = [":block_allocator_base"],
+)
+
+cc_library(
     name = "fallback_allocator",
     hdrs = [
         "public/pw_allocator/fallback_allocator.h",
@@ -213,6 +240,13 @@
 )
 
 cc_library(
+    name = "first_fit_block_allocator",
+    hdrs = ["public/pw_allocator/first_fit_block_allocator.h"],
+    includes = ["public"],
+    deps = [":block_allocator_base"],
+)
+
+cc_library(
     name = "freelist",
     srcs = [
         "freelist.cc",
@@ -245,6 +279,13 @@
 )
 
 cc_library(
+    name = "last_fit_block_allocator",
+    hdrs = ["public/pw_allocator/last_fit_block_allocator.h"],
+    includes = ["public"],
+    deps = [":block_allocator_base"],
+)
+
+cc_library(
     name = "libc_allocator",
     srcs = [
         "libc_allocator.cc",
@@ -321,6 +362,13 @@
     ],
 )
 
+cc_library(
+    name = "worst_fit_block_allocator",
+    hdrs = ["public/pw_allocator/worst_fit_block_allocator.h"],
+    includes = ["public"],
+    deps = [":block_allocator_base"],
+)
+
 # Test support
 
 cc_library(
@@ -333,8 +381,8 @@
     deps = [
         ":allocator",
         ":block",
-        ":block_allocator",
         ":buffer",
+        ":first_fit_block_allocator",
         ":tracking_allocator",
         "//pw_assert",
         "//pw_bytes",
@@ -345,6 +393,26 @@
 )
 
 cc_library(
+    name = "block_allocator_testing",
+    testonly = True,
+    srcs = [
+        "block_allocator_testing.cc",
+    ],
+    hdrs = [
+        "public/pw_allocator/block_allocator_testing.h",
+    ],
+    includes = ["public"],
+    deps = [
+        ":block",
+        ":block_allocator",
+        "//pw_assert",
+        "//pw_bytes:alignment",
+        "//pw_status",
+        "//pw_unit_test",
+    ],
+)
+
+cc_library(
     name = "test_harness",
     testonly = True,
     srcs = [
@@ -406,6 +474,16 @@
 )
 
 pw_cc_test(
+    name = "best_fit_block_allocator_test",
+    srcs = ["best_fit_block_allocator_test.cc"],
+    deps = [
+        ":best_fit_block_allocator",
+        ":block_allocator_testing",
+        "//pw_unit_test",
+    ],
+)
+
+pw_cc_test(
     name = "block_test",
     srcs = [
         "block_test.cc",
@@ -418,20 +496,6 @@
 )
 
 pw_cc_test(
-    name = "block_allocator_test",
-    srcs = [
-        "block_allocator_test.cc",
-    ],
-    deps = [
-        ":block",
-        ":block_allocator",
-        ":buffer",
-        "//pw_bytes",
-        "//pw_unit_test",
-    ],
-)
-
-pw_cc_test(
     name = "buddy_allocator_test",
     srcs = [
         "buddy_allocator_test.cc",
@@ -477,6 +541,16 @@
 )
 
 pw_cc_test(
+    name = "dual_first_fit_block_allocator_test",
+    srcs = ["dual_first_fit_block_allocator_test.cc"],
+    deps = [
+        ":block_allocator_testing",
+        ":dual_first_fit_block_allocator",
+        "//pw_unit_test",
+    ],
+)
+
+pw_cc_test(
     name = "fallback_allocator_test",
     srcs = [
         "fallback_allocator_test.cc",
@@ -490,6 +564,17 @@
 )
 
 pw_cc_test(
+    name = "first_fit_block_allocator_test",
+    srcs = ["first_fit_block_allocator_test.cc"],
+    deps = [
+        ":block_allocator_testing",
+        ":buffer",
+        ":first_fit_block_allocator",
+        "//pw_unit_test",
+    ],
+)
+
+pw_cc_test(
     name = "freelist_test",
     srcs = [
         "freelist_test.cc",
@@ -513,6 +598,16 @@
 )
 
 pw_cc_test(
+    name = "last_fit_block_allocator_test",
+    srcs = ["last_fit_block_allocator_test.cc"],
+    deps = [
+        ":block_allocator_testing",
+        ":last_fit_block_allocator",
+        "//pw_unit_test",
+    ],
+)
+
+pw_cc_test(
     name = "libc_allocator_test",
     srcs = [
         "libc_allocator_test.cc",
@@ -590,6 +685,16 @@
     ],
 )
 
+pw_cc_test(
+    name = "worst_fit_block_allocator_test",
+    srcs = ["worst_fit_block_allocator_test.cc"],
+    deps = [
+        ":block_allocator_testing",
+        ":worst_fit_block_allocator",
+        "//pw_unit_test",
+    ],
+)
+
 # Docs
 
 cc_library(
diff --git a/pw_allocator/BUILD.gn b/pw_allocator/BUILD.gn
index 7d34c25..b1eea8a 100644
--- a/pw_allocator/BUILD.gn
+++ b/pw_allocator/BUILD.gn
@@ -58,6 +58,12 @@
   sources = [ "allocator_as_pool.cc" ]
 }
 
+pw_source_set("best_fit_block_allocator") {
+  public_configs = [ ":default_config" ]
+  public = [ "public/pw_allocator/best_fit_block_allocator.h" ]
+  public_deps = [ ":block_allocator_base" ]
+}
+
 pw_source_set("block") {
   public_configs = [ ":default_config" ]
   public = [ "public/pw_allocator/block.h" ]
@@ -77,10 +83,24 @@
   sources = [ "block.cc" ]
 }
 
+# TODO(b/326509341): Remove when all customers depend on the correct targets.
 pw_source_set("block_allocator") {
   public_configs = [ ":default_config" ]
   public = [ "public/pw_allocator/block_allocator.h" ]
   public_deps = [
+    ":best_fit_block_allocator",
+    ":dual_first_fit_block_allocator",
+    ":first_fit_block_allocator",
+    ":last_fit_block_allocator",
+    ":worst_fit_block_allocator",
+  ]
+}
+
+# TODO(b/326509341): Rename when all customers depend on the correct targets.
+pw_source_set("block_allocator_base") {
+  public_configs = [ ":default_config" ]
+  public = [ "public/pw_allocator/block_allocator_base.h" ]
+  public_deps = [
     ":allocator",
     ":block",
     dir_pw_bytes,
@@ -178,6 +198,12 @@
   ]
 }
 
+pw_source_set("dual_first_fit_block_allocator") {
+  public_configs = [ ":default_config" ]
+  public = [ "public/pw_allocator/dual_first_fit_block_allocator.h" ]
+  public_deps = [ ":block_allocator_base" ]
+}
+
 pw_source_set("fallback_allocator") {
   public_configs = [ ":default_config" ]
   public = [ "public/pw_allocator/fallback_allocator.h" ]
@@ -191,6 +217,12 @@
   ]
 }
 
+pw_source_set("first_fit_block_allocator") {
+  public_configs = [ ":default_config" ]
+  public = [ "public/pw_allocator/first_fit_block_allocator.h" ]
+  public_deps = [ ":block_allocator_base" ]
+}
+
 pw_source_set("freelist") {
   public_configs = [ ":default_config" ]
   public = [ "public/pw_allocator/freelist.h" ]
@@ -217,6 +249,12 @@
   sources = [ "freelist_heap.cc" ]
 }
 
+pw_source_set("last_fit_block_allocator") {
+  public_configs = [ ":default_config" ]
+  public = [ "public/pw_allocator/last_fit_block_allocator.h" ]
+  public_deps = [ ":block_allocator_base" ]
+}
+
 pw_source_set("libc_allocator") {
   public_configs = [ ":default_config" ]
   public = [ "public/pw_allocator/libc_allocator.h" ]
@@ -274,6 +312,12 @@
   ]
 }
 
+pw_source_set("worst_fit_block_allocator") {
+  public_configs = [ ":default_config" ]
+  public = [ "public/pw_allocator/worst_fit_block_allocator.h" ]
+  public_deps = [ ":block_allocator_base" ]
+}
+
 # Test support
 
 pw_source_set("testing") {
@@ -281,8 +325,8 @@
   public_deps = [
     ":allocator",
     ":block",
-    ":block_allocator",
     ":buffer",
+    ":first_fit_block_allocator",
     ":tracking_allocator",
     dir_pw_bytes,
     dir_pw_result,
@@ -292,6 +336,21 @@
   deps = [ dir_pw_assert ]
 }
 
+pw_source_set("block_allocator_testing") {
+  public = [ "public/pw_allocator/block_allocator_testing.h" ]
+  public_deps = [
+    ":block",
+    ":block_allocator",
+    dir_pw_unit_test,
+  ]
+  deps = [
+    "$dir_pw_bytes:alignment",
+    dir_pw_assert,
+    dir_pw_status,
+  ]
+  sources = [ "block_allocator_testing.cc" ]
+}
+
 pw_source_set("test_harness") {
   public = [ "public/pw_allocator/test_harness.h" ]
   public_deps = [
@@ -333,12 +392,12 @@
   sources = [ "allocator_test.cc" ]
 }
 
-pw_test("block_test") {
+pw_test("best_fit_block_allocator_test") {
   deps = [
-    ":block",
-    dir_pw_span,
+    ":best_fit_block_allocator",
+    ":block_allocator_testing",
   ]
-  sources = [ "block_test.cc" ]
+  sources = [ "best_fit_block_allocator_test.cc" ]
 
   # TODO: https://pwbug.dev/325509758 - Doesn't work on the Pico yet; hangs
   # indefinitely.
@@ -347,14 +406,12 @@
   }
 }
 
-pw_test("block_allocator_test") {
+pw_test("block_test") {
   deps = [
     ":block",
-    ":block_allocator",
-    ":buffer",
-    dir_pw_bytes,
+    dir_pw_span,
   ]
-  sources = [ "block_allocator_test.cc" ]
+  sources = [ "block_test.cc" ]
 
   # TODO: https://pwbug.dev/325509758 - Doesn't work on the Pico yet; hangs
   # indefinitely.
@@ -390,6 +447,20 @@
   sources = [ "buddy_allocator_test.cc" ]
 }
 
+pw_test("dual_first_fit_block_allocator_test") {
+  deps = [
+    ":block_allocator_testing",
+    ":dual_first_fit_block_allocator",
+  ]
+  sources = [ "dual_first_fit_block_allocator_test.cc" ]
+
+  # TODO: https://pwbug.dev/325509758 - Doesn't work on the Pico yet; hangs
+  # indefinitely.
+  if (pw_build_EXECUTABLE_TARGET_TYPE == "pico_executable") {
+    enable_if = false
+  }
+}
+
 pw_test("fallback_allocator_test") {
   deps = [
     ":fallback_allocator",
@@ -399,6 +470,21 @@
   sources = [ "fallback_allocator_test.cc" ]
 }
 
+pw_test("first_fit_block_allocator_test") {
+  deps = [
+    ":block_allocator_testing",
+    ":buffer",
+    ":first_fit_block_allocator",
+  ]
+  sources = [ "first_fit_block_allocator_test.cc" ]
+
+  # TODO: https://pwbug.dev/325509758 - Doesn't work on the Pico yet; hangs
+  # indefinitely.
+  if (pw_build_EXECUTABLE_TARGET_TYPE == "pico_executable") {
+    enable_if = false
+  }
+}
+
 pw_test("freelist_test") {
   deps = [
     ":freelist",
@@ -425,6 +511,20 @@
   }
 }
 
+pw_test("last_fit_block_allocator_test") {
+  deps = [
+    ":block_allocator_testing",
+    ":last_fit_block_allocator",
+  ]
+  sources = [ "last_fit_block_allocator_test.cc" ]
+
+  # TODO: https://pwbug.dev/325509758 - Doesn't work on the Pico yet; hangs
+  # indefinitely.
+  if (pw_build_EXECUTABLE_TARGET_TYPE == "pico_executable") {
+    enable_if = false
+  }
+}
+
 pw_test("libc_allocator_test") {
   deps = [ ":libc_allocator" ]
   sources = [ "libc_allocator_test.cc" ]
@@ -483,25 +583,43 @@
   sources = [ "unique_ptr_test.cc" ]
 }
 
+pw_test("worst_fit_block_allocator_test") {
+  deps = [
+    ":block_allocator_testing",
+    ":worst_fit_block_allocator",
+  ]
+  sources = [ "worst_fit_block_allocator_test.cc" ]
+
+  # TODO: https://pwbug.dev/325509758 - Doesn't work on the Pico yet; hangs
+  # indefinitely.
+  if (pw_build_EXECUTABLE_TARGET_TYPE == "pico_executable") {
+    enable_if = false
+  }
+}
+
 pw_test_group("tests") {
   tests = [
     ":allocator_as_pool_test",
     ":allocator_test",
-    ":block_allocator_test",
+    ":best_fit_block_allocator_test",
     ":block_test",
     ":buddy_allocator_test",
     ":buffer_test",
     ":bump_allocator_test",
     ":chunk_pool_test",
+    ":dual_first_fit_block_allocator_test",
     ":fallback_allocator_test",
+    ":first_fit_block_allocator_test",
     ":freelist_test",
     ":freelist_heap_test",
+    ":last_fit_block_allocator_test",
     ":libc_allocator_test",
     ":null_allocator_test",
     ":typed_pool_test",
     ":synchronized_allocator_test",
     ":tracking_allocator_test",
     ":unique_ptr_test",
+    ":worst_fit_block_allocator_test",
   ]
   group_deps = [ "examples" ]
 }
diff --git a/pw_allocator/CMakeLists.txt b/pw_allocator/CMakeLists.txt
index 6c42baf..8a2d3e6 100644
--- a/pw_allocator/CMakeLists.txt
+++ b/pw_allocator/CMakeLists.txt
@@ -41,6 +41,15 @@
     allocator_as_pool.cc
 )
 
+pw_add_library(pw_allocator.best_fit_block_allocator INTERFACE
+  HEADERS
+    public/pw_allocator/best_fit_block_allocator.h
+  PUBLIC_INCLUDES
+    public
+  PUBLIC_DEPS
+    pw_allocator.block_allocator_base
+)
+
 pw_add_library(pw_allocator.block STATIC
   HEADERS
     public/pw_allocator/block.h
@@ -61,15 +70,30 @@
     block.cc
 )
 
-pw_add_library(pw_allocator.block_allocator STATIC
+# TODO(b/326509341): Remove when all customers depend on the correct targets.
+pw_add_library(pw_allocator.block_allocator INTERFACE
   HEADERS
     public/pw_allocator/block_allocator.h
   PUBLIC_INCLUDES
     public
   PUBLIC_DEPS
+    pw_allocator.first_fit_block_allocator
+    pw_allocator.last_fit_block_allocator
+    pw_allocator.best_fit_block_allocator
+    pw_allocator.worst_fit_block_allocator
+    pw_allocator.dual_first_fit_block_allocator
+)
+
+# TODO(b/326509341): Rename when all customers depend on the correct targets.
+pw_add_library(pw_allocator.block_allocator_base STATIC
+  HEADERS
+    public/pw_allocator/block_allocator_base.h
+  PUBLIC_INCLUDES
+    public
+  PUBLIC_DEPS
     pw_allocator.allocator
     pw_allocator.block
-    pw_bytes
+    pw_bytes.alignment
     pw_result
     pw_status
   PRIVATE_DEPS
@@ -174,6 +198,15 @@
     pw_status
 )
 
+pw_add_library(pw_allocator.dual_first_fit_block_allocator INTERFACE
+  HEADERS
+    public/pw_allocator/dual_first_fit_block_allocator.h
+  PUBLIC_INCLUDES
+    public
+  PUBLIC_DEPS
+    pw_allocator.block_allocator_base
+)
+
 pw_add_library(pw_allocator.fallback_allocator INTERFACE
   HEADERS
     public/pw_allocator/fallback_allocator.h
@@ -187,6 +220,15 @@
     pw_tokenizer
 )
 
+pw_add_library(pw_allocator.first_fit_block_allocator INTERFACE
+  HEADERS
+    public/pw_allocator/first_fit_block_allocator.h
+  PUBLIC_INCLUDES
+    public
+  PUBLIC_DEPS
+    pw_allocator.block_allocator_base
+)
+
 pw_add_library(pw_allocator.freelist STATIC
   HEADERS
     public/pw_allocator/freelist.h
@@ -216,6 +258,15 @@
     freelist_heap.cc
 )
 
+pw_add_library(pw_allocator.last_fit_block_allocator INTERFACE
+  HEADERS
+    public/pw_allocator/last_fit_block_allocator.h
+  PUBLIC_INCLUDES
+    public
+  PUBLIC_DEPS
+    pw_allocator.block_allocator_base
+)
+
 pw_add_library(pw_allocator.libc_allocator STATIC
   SOURCES
     libc_allocator.cc
@@ -281,6 +332,15 @@
     pw_result
 )
 
+pw_add_library(pw_allocator.worst_fit_block_allocator INTERFACE
+  HEADERS
+    public/pw_allocator/worst_fit_block_allocator.h
+  PUBLIC_INCLUDES
+    public
+  PUBLIC_DEPS
+    pw_allocator.block_allocator_base
+)
+
 # Test support
 
 pw_add_library(pw_allocator.testing INTERFACE
@@ -291,8 +351,8 @@
   PUBLIC_DEPS
     pw_allocator.allocator
     pw_allocator.block
-    pw_allocator.block_allocator
     pw_allocator.buffer
+    pw_allocator.first_fit_block_allocator
     pw_allocator.tracking_allocator
     pw_bytes
     pw_result
@@ -302,6 +362,24 @@
     pw_assert
 )
 
+pw_add_library(pw_allocator.block_allocator_testing STATIC
+  HEADERS
+    public/pw_allocator/block_allocator_testing.h
+    public/pw_allocator/testing.h
+  PUBLIC_INCLUDES
+    public
+  PUBLIC_DEPS
+    pw_allocator.block
+    pw_allocator.block_allocator
+    pw_unit_test
+  PRIVATE_DEPS
+    pw_assert
+    pw_bytes.alignment
+    pw_status
+  SOURCES
+    block_allocator_testing.cc
+)
+
 pw_add_library(pw_allocator.test_harness STATIC
   HEADERS
     public/pw_allocator/test_harness.h
@@ -354,6 +432,17 @@
     pw_allocator
 )
 
+pw_add_test(pw_allocator.best_fit_block_allocator_test
+  SOURCES
+    best_fit_block_allocator_test.cc
+  PRIVATE_DEPS
+    pw_allocator.block_allocator_testing
+    pw_allocator.best_fit_block_allocator
+  GROUPS
+    modules
+    pw_allocator
+)
+
 pw_add_test(pw_allocator.block_test
   SOURCES
     block_test.cc
@@ -365,19 +454,6 @@
     pw_allocator
 )
 
-pw_add_test(pw_allocator.block_allocator_test
-  SOURCES
-    block_allocator_test.cc
-  PRIVATE_DEPS
-    pw_allocator.block
-    pw_allocator.block_allocator
-    pw_allocator.buffer
-    pw_bytes
-  GROUPS
-    modules
-    pw_allocator
-)
-
 pw_add_test(pw_allocator.buffer_test
   SOURCES
     buffer_test.cc
@@ -418,6 +494,17 @@
     pw_allocator
 )
 
+pw_add_test(pw_allocator.dual_first_fit_block_allocator_test
+  SOURCES
+    dual_first_fit_block_allocator_test.cc
+  PRIVATE_DEPS
+    pw_allocator.block_allocator_testing
+    pw_allocator.dual_first_fit_block_allocator
+  GROUPS
+    modules
+    pw_allocator
+)
+
 pw_add_test(pw_allocator.fallback_allocator_test
   PRIVATE_DEPS
     pw_allocator.testing
@@ -430,6 +517,18 @@
     pw_allocator
 )
 
+pw_add_test(pw_allocator.first_fit_block_allocator_test
+  SOURCES
+    first_fit_block_allocator_test.cc
+  PRIVATE_DEPS
+    pw_allocator.block_allocator_testing
+    pw_allocator.buffer
+    pw_allocator.first_fit_block_allocator
+  GROUPS
+    modules
+    pw_allocator
+)
+
 pw_add_test(pw_allocator.freelist_test
   SOURCES
     freelist_test.cc
@@ -452,6 +551,17 @@
     pw_allocator
 )
 
+pw_add_test(pw_allocator.last_fit_block_allocator_test
+  SOURCES
+    last_fit_block_allocator_test.cc
+  PRIVATE_DEPS
+    pw_allocator.block_allocator_testing
+    pw_allocator.last_fit_block_allocator
+  GROUPS
+    modules
+    pw_allocator
+)
+
 pw_add_test(pw_allocator.libc_allocator_test
   SOURCES
     libc_allocator_test.cc
@@ -527,3 +637,14 @@
     modules
     pw_allocator
 )
+
+pw_add_test(pw_allocator.worst_fit_block_allocator_test
+  SOURCES
+    worst_fit_block_allocator_test.cc
+  PRIVATE_DEPS
+    pw_allocator.block_allocator_testing
+    pw_allocator.worst_fit_block_allocator
+  GROUPS
+    modules
+    pw_allocator
+)
diff --git a/pw_allocator/api.rst b/pw_allocator/api.rst
index 5d7fce7..b5dd5d0 100644
--- a/pw_allocator/api.rst
+++ b/pw_allocator/api.rst
@@ -96,7 +96,7 @@
 
 BlockAllocator
 ==============
-.. doxygenclass:: pw::allocator::internal::BlockAllocator
+.. doxygenclass:: pw::allocator::BlockAllocator
    :members:
 
 .. _module-pw_allocator-api-first_fit_block_allocator:
diff --git a/pw_allocator/best_fit_block_allocator_test.cc b/pw_allocator/best_fit_block_allocator_test.cc
new file mode 100644
index 0000000..b522a64
--- /dev/null
+++ b/pw_allocator/best_fit_block_allocator_test.cc
@@ -0,0 +1,120 @@
+// Copyright 2024 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+#include "pw_allocator/best_fit_block_allocator.h"
+
+#include "pw_allocator/block_allocator_testing.h"
+#include "pw_unit_test/framework.h"
+
+namespace pw::allocator {
+namespace {
+
+// Test fixtures.
+
+using BlockAllocatorTest = test::BlockAllocatorTest;
+using Preallocation = test::Preallocation;
+using BestFitBlockAllocatorType =
+    BestFitBlockAllocator<BlockAllocatorTest::OffsetType>;
+
+class BestFitBlockAllocatorTest : public BlockAllocatorTest {
+ public:
+  BestFitBlockAllocatorTest() : BlockAllocatorTest(allocator_) {}
+
+ private:
+  BestFitBlockAllocatorType allocator_;
+};
+
+// Unit tests.
+
+TEST_F(BestFitBlockAllocatorTest, CanAutomaticallyInit) {
+  BestFitBlockAllocatorType allocator(GetBytes());
+  CanAutomaticallyInit(allocator);
+}
+
+TEST_F(BestFitBlockAllocatorTest, CanExplicitlyInit) {
+  BestFitBlockAllocatorType allocator;
+  CanExplicitlyInit(allocator);
+}
+
+TEST_F(BestFitBlockAllocatorTest, GetCapacity) { GetCapacity(); }
+
+TEST_F(BestFitBlockAllocatorTest, AllocateLarge) { AllocateLarge(); }
+
+TEST_F(BestFitBlockAllocatorTest, AllocateSmall) { AllocateSmall(); }
+
+TEST_F(BestFitBlockAllocatorTest, AllocateLargeAlignment) {
+  AllocateLargeAlignment();
+}
+
+TEST_F(BestFitBlockAllocatorTest, AllocateAlignmentFailure) {
+  AllocateAlignmentFailure();
+}
+
+TEST_F(BestFitBlockAllocatorTest, AllocatesBestCompatible) {
+  auto& allocator = GetAllocator({
+      {kLargeOuterSize, Preallocation::kIndexFree},
+      {kSmallerOuterSize, 1},
+      {kSmallOuterSize, Preallocation::kIndexFree},
+      {kSmallerOuterSize, 3},
+      {kSmallerOuterSize, Preallocation::kIndexFree},
+      {kSmallerOuterSize, 5},
+      {kLargerOuterSize, Preallocation::kIndexFree},
+      {Preallocation::kSizeRemaining, 7},
+  });
+
+  Store(2, allocator.Allocate(Layout(kSmallInnerSize, 1)));
+  EXPECT_EQ(NextAfter(1), Fetch(2));
+  EXPECT_EQ(NextAfter(2), Fetch(3));
+  Store(0, allocator.Allocate(Layout(kLargeInnerSize, 1)));
+  EXPECT_EQ(NextAfter(0), Fetch(1));
+}
+
+TEST_F(BestFitBlockAllocatorTest, DeallocateNull) { DeallocateNull(); }
+
+TEST_F(BestFitBlockAllocatorTest, DeallocateShuffled) { DeallocateShuffled(); }
+
+TEST_F(BestFitBlockAllocatorTest, IterateOverBlocks) { IterateOverBlocks(); }
+
+TEST_F(BestFitBlockAllocatorTest, ResizeNull) { ResizeNull(); }
+
+TEST_F(BestFitBlockAllocatorTest, ResizeLargeSame) { ResizeLargeSame(); }
+
+TEST_F(BestFitBlockAllocatorTest, ResizeLargeSmaller) { ResizeLargeSmaller(); }
+
+TEST_F(BestFitBlockAllocatorTest, ResizeLargeLarger) { ResizeLargeLarger(); }
+
+TEST_F(BestFitBlockAllocatorTest, ResizeLargeLargerFailure) {
+  ResizeLargeLargerFailure();
+}
+
+TEST_F(BestFitBlockAllocatorTest, ResizeSmallSame) { ResizeSmallSame(); }
+
+TEST_F(BestFitBlockAllocatorTest, ResizeSmallSmaller) { ResizeSmallSmaller(); }
+
+TEST_F(BestFitBlockAllocatorTest, ResizeSmallLarger) { ResizeSmallLarger(); }
+
+TEST_F(BestFitBlockAllocatorTest, ResizeSmallLargerFailure) {
+  ResizeSmallLargerFailure();
+}
+
+TEST_F(BestFitBlockAllocatorTest, CanGetLayoutFromValidPointer) {
+  CanGetLayoutFromValidPointer();
+}
+
+TEST_F(BestFitBlockAllocatorTest, CannotGetLayoutFromInvalidPointer) {
+  CannotGetLayoutFromInvalidPointer();
+}
+
+}  // namespace
+}  // namespace pw::allocator
diff --git a/pw_allocator/block_allocator.cc b/pw_allocator/block_allocator.cc
index 721266e..c20c049 100644
--- a/pw_allocator/block_allocator.cc
+++ b/pw_allocator/block_allocator.cc
@@ -12,8 +12,7 @@
 // License for the specific language governing permissions and limitations under
 // the License.
 
-#include "pw_allocator/block_allocator.h"
-
+#include "pw_allocator/block_allocator_base.h"
 #include "pw_assert/check.h"
 
 namespace pw::allocator::internal {
diff --git a/pw_allocator/block_allocator_test.cc b/pw_allocator/block_allocator_test.cc
deleted file mode 100644
index 3bdf7ed..0000000
--- a/pw_allocator/block_allocator_test.cc
+++ /dev/null
@@ -1,793 +0,0 @@
-// Copyright 2024 The Pigweed Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-//     https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-#include "pw_allocator/block_allocator.h"
-
-#include <cstdint>
-#include <limits>
-
-#include "pw_allocator/allocator.h"
-#include "pw_allocator/block.h"
-#include "pw_allocator/buffer.h"
-#include "pw_assert/check.h"
-#include "pw_bytes/alignment.h"
-#include "pw_bytes/span.h"
-#include "pw_status/status.h"
-#include "pw_unit_test/framework.h"
-#include "pw_unit_test/framework_backend.h"
-
-namespace pw::allocator {
-namespace {
-
-// Test fixtures.
-
-using OffsetType = uint16_t;
-
-// Size of the memory region to use in the tests below.
-static constexpr size_t kCapacity = 1024;
-
-// Represents the sizes of various allocations.
-static constexpr size_t kLargeInnerSize = kCapacity / 8;
-static constexpr size_t kLargeOuterSize =
-    Block<OffsetType>::kBlockOverhead + kLargeInnerSize;
-
-static constexpr size_t kSmallInnerSize = Block<OffsetType>::kBlockOverhead * 2;
-static constexpr size_t kSmallOuterSize =
-    Block<OffsetType>::kBlockOverhead + kSmallInnerSize;
-
-static constexpr size_t kSmallerOuterSize = kSmallInnerSize;
-static constexpr size_t kLargerOuterSize = kLargeOuterSize + kSmallerOuterSize;
-
-// Minimum size of a "large" allocation; allocations less than this size are
-// considered "small" when using the DualFirstFit strategy.
-static constexpr size_t kDualFitThreshold = kSmallInnerSize * 2;
-
-// The number of allocated pointers cached by the test fixture.
-static constexpr size_t kNumPtrs = 16;
-
-/// Represents an initial state for a memory block.
-///
-/// Unit tests can specify an initial block layout by passing a list of these
-/// structs to `Preallocate`.
-///
-/// The outer size of each block must be at least `kBlockOverhead` for the
-/// block type in use. The special `kSizeRemaining` may be used for at most
-/// one block to give it any space not assigned to other blocks.
-///
-/// The index must be less than `kNumPtrs` or one
-/// of the special values `kIndexFree` or `kIndexNext`. A regular index will
-/// mark the block as "used" and cache the pointer to its usable space in
-/// `ptrs[index]`. The special value `kIndexFree` will leave the block as
-/// "free". The special value `kIndexNext` will mark the block as "used" and
-/// cache its pointer in the next available slot in `test_fixture`. This
-/// may be used when the pointer is not needed for the test but should still
-/// be automatically freed at the end of the test.
-///
-/// Example:
-/// @code{.cpp}
-///   // BlockType = UnpoisonedBlock<uint32_t>, so kBlockOverhead == 8.
-///   ASSERT_EQ(Preallocate({
-///     {32,              0},           // ptrs[0] == 24 byte region.
-///     {24,              kIndexFree},  // Free block of 16 bytes.
-///     {48,              2},           // ptrs[2] == 40 byte region.
-///     {kSizeRemaining,  kIndexFree},  // Free block of leftover space.
-///     {64,              4},           // ptrs[4] == 56 byte region from the
-///                                     //             end of the allocator.
-///   }), OkStatus());
-/// @endcode
-struct Preallocation {
-  /// The outer size of the block to preallocate.
-  size_t outer_size;
-
-  // Index into the `test_fixture` array where the pointer to the block's
-  // space should be cached.
-  size_t index;
-
-  /// Special value indicating the block should comprise all remaining
-  /// space not preallocated to any other block. May be used at most once.
-  static constexpr size_t kSizeRemaining = std::numeric_limits<size_t>::max();
-
-  /// Special value indicating the block should be treated as unallocated,
-  /// i.e. its pointer should not be cached.
-  static constexpr size_t kIndexFree = kNumPtrs + 1;
-
-  /// Special value indicating to use the next available index.
-  static constexpr size_t kIndexNext = kNumPtrs + 2;
-};
-
-/// Test fixture responsible for managing a memory region and an allocator that
-/// allocates from it.
-template <typename BlockAllocatorType>
-class TestFixture {
- public:
-  using block_allocator_type = BlockAllocatorType;
-  using BlockType = typename block_allocator_type::BlockType;
-
-  TestFixture() { ptrs_.fill(nullptr); }
-
-  ~TestFixture() {
-    for (size_t i = 0; i < kNumPtrs; ++i) {
-      if (ptrs_[i] != nullptr) {
-        allocator_->Deallocate(ptrs_[i]);
-      }
-    }
-    allocator_->Reset();
-  }
-
-  /// Returns the underlying memory region.
-  ByteSpan bytes() { return ByteSpan(allocator_.data(), allocator_.size()); }
-
-  /// Returns a reference to a cached pointer.
-  void*& operator[](size_t index) { return ptrs_[index]; }
-
-  /// Initialize the allocator with a region of memory and return it.
-  block_allocator_type& GetAllocator() {
-    EXPECT_EQ(allocator_->Init(bytes()), OkStatus());
-    return *allocator_;
-  }
-
-  /// Initialize the allocator with a sequence of preallocated blocks and return
-  /// it.
-  ///
-  /// See also ``Preallocation``.
-  block_allocator_type& GetAllocator(
-      std::initializer_list<Preallocation> preallocations) {
-    // First, check whether any blocks use kSizeRemaining, and calculate how
-    // large that will be.
-    size_t remaining_outer_size = kCapacity;
-    for (auto& preallocation : preallocations) {
-      if (preallocation.outer_size != Preallocation::kSizeRemaining) {
-        size_t outer_size =
-            AlignUp(preallocation.outer_size, BlockType::kAlignment);
-        PW_CHECK_INT_GE(remaining_outer_size, outer_size);
-        remaining_outer_size -= outer_size;
-      }
-    }
-
-    Result<BlockType*> result = BlockType::Init(bytes());
-    PW_CHECK_OK(result.status());
-    BlockType* block = *result;
-    void* begin = block->UsableSpace();
-
-    // To prevent free blocks being merged back into the block of available
-    // space, treat the available space as being used.
-    block->MarkUsed();
-
-    size_t next_index = 0;
-    for (auto& preallocation : preallocations) {
-      PW_CHECK_NOTNULL(block);
-
-      // Perform the allocation.
-      size_t outer_size = preallocation.outer_size;
-      if (outer_size == Preallocation::kSizeRemaining) {
-        outer_size = remaining_outer_size;
-        remaining_outer_size = 0;
-      }
-      size_t inner_size = outer_size - BlockType::kBlockOverhead;
-
-      block->MarkFree();
-      PW_CHECK_OK(BlockType::AllocFirst(block, inner_size, 1));
-      if (!block->Last()) {
-        block->Next()->MarkUsed();
-      }
-
-      // Free the block or cache the allocated pointer.
-      if (preallocation.index == Preallocation::kIndexFree) {
-        BlockType::Free(block);
-
-      } else if (preallocation.index == Preallocation::kIndexNext) {
-        while (true) {
-          PW_CHECK_INT_LT(next_index, kNumPtrs);
-          if (ptrs_[next_index] == nullptr &&
-              std::all_of(preallocations.begin(),
-                          preallocations.end(),
-                          [next_index](const Preallocation& other) {
-                            return other.index != next_index;
-                          })) {
-            break;
-          }
-          ++next_index;
-        }
-        ptrs_[next_index] = block->UsableSpace();
-
-      } else {
-        ptrs_[preallocation.index] = block->UsableSpace();
-      }
-      block = block->Next();
-    }
-    if (block != nullptr) {
-      block->MarkFree();
-    }
-    block = BlockType::FromUsableSpace(begin);
-    PW_CHECK_OK(allocator_->Init(block));
-    return *allocator_;
-  }
-
-  /// Gets the next allocation from an allocated pointer.
-  void* NextAfter(size_t index) {
-    if (index > ptrs_.size()) {
-      return nullptr;
-    }
-    auto* block = BlockType::FromUsableSpace(ptrs_[index]);
-    while (!block->Last()) {
-      block = block->Next();
-      if (block->Used()) {
-        return block->UsableSpace();
-      }
-    }
-    return nullptr;
-  }
-
- private:
-  WithBuffer<BlockAllocatorType, kCapacity, BlockType::kAlignment> allocator_;
-  std::array<void*, kNumPtrs> ptrs_;
-};
-
-/// Ensures the memory is usable by writing to it.
-void UseMemory(void* ptr, size_t size) { std::memset(ptr, 0x5a, size); }
-
-#define TEST_ONE_STRATEGY(strategy, test_case)                      \
-  TEST(BlockAllocatorTest, test_case##_##strategy) {                \
-    TestFixture<strategy##BlockAllocator<OffsetType>> test_fixture; \
-    test_case(test_fixture);                                        \
-  }
-
-#define TEST_FOREACH_STRATEGY(test_case) \
-  TEST_ONE_STRATEGY(FirstFit, test_case) \
-  TEST_ONE_STRATEGY(LastFit, test_case)  \
-  TEST_ONE_STRATEGY(BestFit, test_case)  \
-  TEST_ONE_STRATEGY(WorstFit, test_case) \
-  TEST_ONE_STRATEGY(DualFirstFit, test_case)
-
-// Unit tests.
-
-template <typename TestFixtureType>
-void CanAutomaticallyInit(TestFixtureType& test_fixture) {
-  typename TestFixtureType::block_allocator_type allocator(
-      test_fixture.bytes());
-  EXPECT_NE(*(allocator.blocks().begin()), nullptr);
-}
-TEST_ONE_STRATEGY(FirstFit, CanAutomaticallyInit)
-TEST_ONE_STRATEGY(LastFit, CanAutomaticallyInit)
-TEST_ONE_STRATEGY(BestFit, CanAutomaticallyInit)
-TEST_ONE_STRATEGY(WorstFit, CanAutomaticallyInit)
-TEST(BlockAllocatorTest, CanAutomaticallyInit_DualFirstFit) {
-  std::array<std::byte, kCapacity> buffer;
-  ByteSpan bytes(buffer);
-  DualFirstFitBlockAllocator<OffsetType> allocator(bytes, kDualFitThreshold);
-  EXPECT_NE(*(allocator.blocks().begin()), nullptr);
-}
-
-template <typename TestFixtureType>
-void CanExplicitlyInit(TestFixtureType& test_fixture) {
-  typename TestFixtureType::block_allocator_type allocator;
-  EXPECT_EQ(*(allocator.blocks().begin()), nullptr);
-  EXPECT_EQ(allocator.Init(test_fixture.bytes()), OkStatus());
-  EXPECT_NE(*(allocator.blocks().begin()), nullptr);
-}
-TEST_FOREACH_STRATEGY(CanExplicitlyInit)
-
-template <typename TestFixtureType>
-void GetCapacity(TestFixtureType& test_fixture) {
-  auto& allocator = test_fixture.GetAllocator();
-  StatusWithSize capacity = allocator.GetCapacity();
-  EXPECT_EQ(capacity.status(), OkStatus());
-  EXPECT_EQ(capacity.size(), kCapacity);
-}
-
-template <typename TestFixtureType>
-void AllocateLarge(TestFixtureType& test_fixture) {
-  auto& allocator = test_fixture.GetAllocator();
-  constexpr Layout layout = Layout::Of<std::byte[kLargeInnerSize]>();
-  test_fixture[0] = allocator.Allocate(layout);
-  ASSERT_NE(test_fixture[0], nullptr);
-  ByteSpan bytes = test_fixture.bytes();
-  EXPECT_GE(test_fixture[0], bytes.data());
-  EXPECT_LE(test_fixture[0], bytes.data() + bytes.size());
-  UseMemory(test_fixture[0], layout.size());
-}
-TEST_FOREACH_STRATEGY(AllocateLarge)
-
-template <typename TestFixtureType>
-void AllocateSmall(TestFixtureType& test_fixture) {
-  auto& allocator = test_fixture.GetAllocator();
-  constexpr Layout layout = Layout::Of<std::byte[kSmallInnerSize]>();
-  test_fixture[0] = allocator.Allocate(layout);
-  ASSERT_NE(test_fixture[0], nullptr);
-  ByteSpan bytes = test_fixture.bytes();
-  EXPECT_GE(test_fixture[0], bytes.data());
-  EXPECT_LE(test_fixture[0], bytes.data() + bytes.size());
-  UseMemory(test_fixture[0], layout.size());
-}
-TEST_FOREACH_STRATEGY(AllocateSmall)
-
-template <typename TestFixtureType>
-void AllocateTooLarge(TestFixtureType& test_fixture) {
-  auto& allocator = test_fixture.GetAllocator();
-  test_fixture[0] = allocator.Allocate(Layout::Of<std::byte[kCapacity * 2]>());
-  EXPECT_EQ(test_fixture[0], nullptr);
-}
-TEST_FOREACH_STRATEGY(AllocateTooLarge)
-
-template <typename TestFixtureType>
-void AllocateLargeAlignment(TestFixtureType& test_fixture) {
-  auto& allocator = test_fixture.GetAllocator();
-  constexpr size_t kAlignment = 64;
-  test_fixture[0] = allocator.Allocate(Layout(kLargeInnerSize, kAlignment));
-  ASSERT_NE(test_fixture[0], nullptr);
-  EXPECT_EQ(reinterpret_cast<uintptr_t>(test_fixture[0]) % kAlignment, 0U);
-  UseMemory(test_fixture[0], kLargeInnerSize);
-
-  test_fixture[1] = allocator.Allocate(Layout(kLargeInnerSize, kAlignment));
-  ASSERT_NE(test_fixture[1], nullptr);
-  EXPECT_EQ(reinterpret_cast<uintptr_t>(test_fixture[1]) % kAlignment, 0U);
-  UseMemory(test_fixture[1], kLargeInnerSize);
-}
-TEST_FOREACH_STRATEGY(AllocateLargeAlignment)
-
-template <typename TestFixtureType>
-void AllocateAlignmentFailure(TestFixtureType& test_fixture) {
-  // Allocate two blocks with an unaligned region between them.
-  using BlockType = typename TestFixtureType::BlockType;
-  constexpr size_t kAlignment = 128;
-  ByteSpan bytes = test_fixture.bytes();
-  auto addr = reinterpret_cast<uintptr_t>(bytes.data());
-  uintptr_t outer_size =
-      AlignUp(addr + BlockType::kBlockOverhead, kAlignment) - addr + 1;
-  auto& allocator = test_fixture.GetAllocator({
-      {outer_size, 0},
-      {kLargeOuterSize, Preallocation::kIndexFree},
-      {Preallocation::kSizeRemaining, 2},
-  });
-
-  // The allocator should be unable to create an aligned region.
-  test_fixture[1] = allocator.Allocate(Layout(kLargeInnerSize, kAlignment));
-  EXPECT_EQ(test_fixture[1], nullptr);
-}
-TEST_FOREACH_STRATEGY(AllocateAlignmentFailure)
-
-TEST(FirstFit, AllocatesFirstCompatible) {
-  TestFixture<FirstFitBlockAllocator<OffsetType>> test_fixture;
-  auto& allocator = test_fixture.GetAllocator({
-      {kSmallOuterSize, Preallocation::kIndexFree},
-      {kSmallerOuterSize, 1},
-      {kSmallOuterSize, Preallocation::kIndexFree},
-      {kSmallerOuterSize, 3},
-      {kLargeOuterSize, Preallocation::kIndexFree},
-      {Preallocation::kSizeRemaining, 5},
-  });
-
-  test_fixture[0] = allocator.Allocate(Layout(kSmallInnerSize, 1));
-  EXPECT_EQ(test_fixture.NextAfter(0), test_fixture[1]);
-  test_fixture[4] = allocator.Allocate(Layout(kLargeInnerSize, 1));
-  EXPECT_EQ(test_fixture.NextAfter(3), test_fixture[4]);
-  EXPECT_EQ(test_fixture.NextAfter(4), test_fixture[5]);
-}
-
-TEST(LastFit, AllocatesLastCompatible) {
-  TestFixture<LastFitBlockAllocator<OffsetType>> test_fixture;
-  auto& allocator = test_fixture.GetAllocator({
-      {kLargeOuterSize, Preallocation::kIndexFree},
-      {kSmallerOuterSize, 1},
-      {kSmallOuterSize, Preallocation::kIndexFree},
-      {kSmallerOuterSize, 3},
-      {kSmallOuterSize, Preallocation::kIndexFree},
-      {Preallocation::kSizeRemaining, 5},
-  });
-
-  test_fixture[0] = allocator.Allocate(Layout(kLargeInnerSize, 1));
-  EXPECT_EQ(test_fixture.NextAfter(0), test_fixture[1]);
-  test_fixture[4] = allocator.Allocate(Layout(kSmallInnerSize, 1));
-  EXPECT_EQ(test_fixture.NextAfter(3), test_fixture[4]);
-  EXPECT_EQ(test_fixture.NextAfter(4), test_fixture[5]);
-}
-
-TEST(BestFit, AllocatesBestCompatible) {
-  TestFixture<BestFitBlockAllocator<OffsetType>> test_fixture;
-  auto& allocator = test_fixture.GetAllocator({
-      {kLargeOuterSize, Preallocation::kIndexFree},
-      {kSmallerOuterSize, 1},
-      {kSmallOuterSize, Preallocation::kIndexFree},
-      {kSmallerOuterSize, 3},
-      {kSmallerOuterSize, Preallocation::kIndexFree},
-      {kSmallerOuterSize, 5},
-      {kLargerOuterSize, Preallocation::kIndexFree},
-      {Preallocation::kSizeRemaining, 7},
-  });
-
-  test_fixture[2] = allocator.Allocate(Layout(kSmallInnerSize, 1));
-  EXPECT_EQ(test_fixture.NextAfter(1), test_fixture[2]);
-  EXPECT_EQ(test_fixture.NextAfter(2), test_fixture[3]);
-  test_fixture[0] = allocator.Allocate(Layout(kLargeInnerSize, 1));
-  EXPECT_EQ(test_fixture.NextAfter(0), test_fixture[1]);
-}
-
-TEST(WorstFit, AllocatesWorstCompatible) {
-  TestFixture<WorstFitBlockAllocator<OffsetType>> test_fixture;
-  auto& allocator = test_fixture.GetAllocator({
-      {kLargeOuterSize, Preallocation::kIndexFree},
-      {kSmallerOuterSize, 1},
-      {kSmallOuterSize, Preallocation::kIndexFree},
-      {kSmallerOuterSize, 3},
-      {kSmallerOuterSize, Preallocation::kIndexFree},
-      {kSmallerOuterSize, 5},
-      {kLargerOuterSize, Preallocation::kIndexFree},
-      {Preallocation::kSizeRemaining, 7},
-  });
-
-  test_fixture[6] = allocator.Allocate(Layout(kLargeInnerSize, 1));
-  EXPECT_EQ(test_fixture.NextAfter(5), test_fixture[6]);
-  EXPECT_EQ(test_fixture.NextAfter(6), test_fixture[7]);
-  test_fixture[0] = allocator.Allocate(Layout(kSmallInnerSize, 1));
-  EXPECT_EQ(test_fixture.NextAfter(0), test_fixture[1]);
-}
-
-TEST(DualFirstFit, AllocatesUsingThreshold) {
-  TestFixture<DualFirstFitBlockAllocator<OffsetType>> test_fixture;
-  auto& allocator = test_fixture.GetAllocator({
-      {kLargerOuterSize, Preallocation::kIndexFree},
-      {kSmallerOuterSize, 1},
-      {kSmallOuterSize, Preallocation::kIndexFree},
-      {Preallocation::kSizeRemaining, 3},
-      {kLargeOuterSize, Preallocation::kIndexFree},
-      {kSmallerOuterSize, 5},
-      {kSmallOuterSize, Preallocation::kIndexFree},
-  });
-  allocator.set_threshold(kDualFitThreshold);
-
-  test_fixture[0] = allocator.Allocate(Layout(kLargeInnerSize, 1));
-  EXPECT_EQ(test_fixture.NextAfter(0), test_fixture[1]);
-  test_fixture[4] = allocator.Allocate(Layout(kLargeInnerSize, 1));
-  EXPECT_EQ(test_fixture.NextAfter(3), test_fixture[4]);
-  EXPECT_EQ(test_fixture.NextAfter(4), test_fixture[5]);
-  test_fixture[6] = allocator.Allocate(Layout(kSmallInnerSize, 1));
-  EXPECT_EQ(test_fixture.NextAfter(5), test_fixture[6]);
-  EXPECT_EQ(test_fixture.NextAfter(6), test_fixture[7]);
-  test_fixture[2] = allocator.Allocate(Layout(kSmallInnerSize, 1));
-  EXPECT_EQ(test_fixture.NextAfter(1), test_fixture[2]);
-  EXPECT_EQ(test_fixture.NextAfter(2), test_fixture[3]);
-}
-
-template <typename TestFixtureType>
-void DeallocateNull(TestFixtureType& test_fixture) {
-  auto& allocator = test_fixture.GetAllocator();
-  allocator.Deallocate(nullptr);
-}
-TEST_FOREACH_STRATEGY(DeallocateNull)
-
-template <typename TestFixtureType>
-void DeallocateShuffled(TestFixtureType& test_fixture) {
-  auto& allocator = test_fixture.GetAllocator();
-  constexpr Layout layout = Layout::Of<std::byte[kSmallInnerSize]>();
-  for (size_t i = 0; i < kNumPtrs; ++i) {
-    test_fixture[i] = allocator.Allocate(layout);
-    if (test_fixture[i] == nullptr) {
-      break;
-    }
-  }
-
-  // Mix up the order of allocations.
-  for (size_t i = 0; i < kNumPtrs; ++i) {
-    if (i % 2 == 0 && i + 1 < kNumPtrs) {
-      std::swap(test_fixture[i], test_fixture[i + 1]);
-    }
-    if (i % 3 == 0 && i + 2 < kNumPtrs) {
-      std::swap(test_fixture[i], test_fixture[i + 2]);
-    }
-  }
-
-  // Deallocate everything.
-  for (size_t i = 0; i < kNumPtrs; ++i) {
-    allocator.Deallocate(test_fixture[i]);
-    test_fixture[i] = nullptr;
-  }
-}
-TEST_FOREACH_STRATEGY(DeallocateShuffled)
-
-TEST(BlockAllocatorTest, DisablePoisoning) {
-  using BlockAllocatorType = FirstFitBlockAllocator<OffsetType, 0>;
-  using BlockType = BlockAllocatorType::BlockType;
-  TestFixture<BlockAllocatorType> test_fixture;
-  auto& allocator = test_fixture.GetAllocator();
-  constexpr Layout layout = Layout::Of<std::byte[kSmallInnerSize]>();
-
-  // Create a bunch of blocks
-  for (size_t i = 0; i < kNumPtrs; ++i) {
-    test_fixture[i] = allocator.Allocate(layout);
-    ASSERT_NE(test_fixture[i], nullptr);
-  }
-  for (size_t i = 0; i < kNumPtrs; i += 2) {
-    uint8_t* ptr = static_cast<uint8_t*>(test_fixture[i]);
-    test_fixture[i] = nullptr;
-
-    // Free every other to prevent merging.
-    allocator.Deallocate(ptr);
-
-    // Modify the contents of the block and check if it is still valid.
-    auto* block = BlockType::FromUsableSpace(ptr);
-    EXPECT_FALSE(block->Used());
-    EXPECT_TRUE(block->IsValid());
-    ptr[0] = ~ptr[0];
-    EXPECT_TRUE(block->IsValid());
-  }
-}
-
-TEST(BlockAllocatorTest, PoisonEveryFreeBlock) {
-  using BlockAllocatorType = FirstFitBlockAllocator<OffsetType, 1>;
-  using BlockType = BlockAllocatorType::BlockType;
-  TestFixture<BlockAllocatorType> test_fixture;
-  auto& allocator = test_fixture.GetAllocator();
-  constexpr Layout layout = Layout::Of<std::byte[kSmallInnerSize]>();
-
-  // Create a bunch of blocks
-  for (size_t i = 0; i < kNumPtrs; ++i) {
-    test_fixture[i] = allocator.Allocate(layout);
-    ASSERT_NE(test_fixture[i], nullptr);
-  }
-  for (size_t i = 0; i < kNumPtrs; i += 2) {
-    uint8_t* ptr = static_cast<uint8_t*>(test_fixture[i]);
-    test_fixture[i] = nullptr;
-
-    // Free every other to prevent merging.
-    allocator.Deallocate(ptr);
-
-    // Modify the contents of the block and check if it is still valid.
-    auto* block = BlockType::FromUsableSpace(ptr);
-    EXPECT_FALSE(block->Used());
-    EXPECT_TRUE(block->IsValid());
-    ptr[0] = ~ptr[0];
-    EXPECT_FALSE(block->IsValid());
-  }
-}
-
-TEST(BlockAllocatorTest, PoisonPeriodically) {
-  using BlockAllocatorType = FirstFitBlockAllocator<OffsetType, 4>;
-  using BlockType = BlockAllocatorType::BlockType;
-  TestFixture<BlockAllocatorType> test_fixture;
-  auto& allocator = test_fixture.GetAllocator();
-  constexpr Layout layout = Layout::Of<std::byte[kSmallInnerSize]>();
-
-  // Create a bunch of blocks
-  for (size_t i = 0; i < kNumPtrs; ++i) {
-    test_fixture[i] = allocator.Allocate(layout);
-    ASSERT_NE(test_fixture[i], nullptr);
-  }
-  for (size_t i = 0; i < kNumPtrs; i += 2) {
-    uint8_t* ptr = static_cast<uint8_t*>(test_fixture[i]);
-    test_fixture[i] = nullptr;
-
-    // Free every other to prevent merging.
-    allocator.Deallocate(ptr);
-
-    // Modify the contents of the block and check if it is still valid.
-    auto* block = BlockType::FromUsableSpace(ptr);
-    EXPECT_FALSE(block->Used());
-    EXPECT_TRUE(block->IsValid());
-    ptr[0] = ~ptr[0];
-    if ((i / 2) % 4 == 3) {
-      EXPECT_FALSE(block->IsValid());
-    } else {
-      EXPECT_TRUE(block->IsValid());
-    }
-  }
-}
-
-template <typename TestFixtureType>
-void IterateOverBlocks(TestFixtureType& test_fixture) {
-  auto& allocator = test_fixture.GetAllocator({
-      {kSmallOuterSize, Preallocation::kIndexFree},
-      {kLargeOuterSize, Preallocation::kIndexNext},
-      {kSmallOuterSize, Preallocation::kIndexFree},
-      {kLargeOuterSize, Preallocation::kIndexNext},
-      {kSmallOuterSize, Preallocation::kIndexFree},
-      {kLargeOuterSize, Preallocation::kIndexNext},
-      {Preallocation::kSizeRemaining, Preallocation::kIndexFree},
-  });
-
-  // Count the blocks. The unallocated ones vary in size, but the allocated ones
-  // should all be the same.
-  size_t free_count = 0;
-  size_t used_count = 0;
-  for (auto* block : allocator.blocks()) {
-    if (block->Used()) {
-      EXPECT_EQ(block->InnerSize(), kLargeInnerSize);
-      ++used_count;
-    } else {
-      ++free_count;
-    }
-  }
-  EXPECT_EQ(used_count, 3U);
-  EXPECT_EQ(free_count, 4U);
-}
-TEST_FOREACH_STRATEGY(IterateOverBlocks)
-
-template <typename TestFixtureType>
-void ResizeNull(TestFixtureType& test_fixture) {
-  auto& allocator = test_fixture.GetAllocator();
-  size_t new_size = 1;
-  EXPECT_FALSE(allocator.Resize(nullptr, new_size));
-}
-TEST_FOREACH_STRATEGY(ResizeNull)
-
-template <typename TestFixtureType>
-void ResizeLargeSame(TestFixtureType& test_fixture) {
-  auto& allocator = test_fixture.GetAllocator({
-      {kLargeOuterSize, 0},
-      {kLargeOuterSize, 1},
-  });
-  size_t new_size = kLargeInnerSize;
-  ASSERT_TRUE(allocator.Resize(test_fixture[0], new_size));
-  UseMemory(test_fixture[0], kLargeInnerSize);
-}
-TEST_FOREACH_STRATEGY(ResizeLargeSame)
-
-template <typename TestFixtureType>
-void ResizeLargeSmaller(TestFixtureType& test_fixture) {
-  auto& allocator = test_fixture.GetAllocator({
-      {kLargeOuterSize, 0},
-      {kLargeOuterSize, 1},
-  });
-  size_t new_size = kSmallInnerSize;
-  ASSERT_TRUE(allocator.Resize(test_fixture[0], new_size));
-  UseMemory(test_fixture[0], kSmallInnerSize);
-}
-TEST_FOREACH_STRATEGY(ResizeLargeSmaller)
-
-template <typename TestFixtureType>
-void ResizeLargeLarger(TestFixtureType& test_fixture) {
-  auto& allocator = test_fixture.GetAllocator({
-      {kLargeOuterSize, 0},
-      {kLargeOuterSize, Preallocation::kIndexFree},
-      {kSmallOuterSize, 2},
-  });
-  size_t new_size = kLargeInnerSize * 2;
-  ASSERT_TRUE(allocator.Resize(test_fixture[0], new_size));
-  UseMemory(test_fixture[0], kLargeInnerSize * 2);
-}
-TEST_FOREACH_STRATEGY(ResizeLargeLarger)
-
-template <typename TestFixtureType>
-void ResizeLargeLargerFailure(TestFixtureType& test_fixture) {
-  auto& allocator = test_fixture.GetAllocator({
-      {kLargeOuterSize, 0},
-      {kSmallOuterSize, 12},
-  });
-  // Memory after ptr is already allocated, so `Resize` should fail.
-  size_t new_size = kLargeInnerSize * 2;
-  EXPECT_FALSE(allocator.Resize(test_fixture[0], new_size));
-}
-TEST_FOREACH_STRATEGY(ResizeLargeLargerFailure)
-
-template <typename TestFixtureType>
-void ResizeSmallSame(TestFixtureType& test_fixture) {
-  auto& allocator = test_fixture.GetAllocator({
-      {kSmallOuterSize, 0},
-      {kSmallOuterSize, 1},
-  });
-  size_t new_size = kSmallInnerSize;
-  ASSERT_TRUE(allocator.Resize(test_fixture[0], new_size));
-  UseMemory(test_fixture[0], kSmallInnerSize);
-}
-TEST_FOREACH_STRATEGY(ResizeSmallSame)
-
-template <typename TestFixtureType>
-void ResizeSmallSmaller(TestFixtureType& test_fixture) {
-  auto& allocator = test_fixture.GetAllocator({
-      {kSmallOuterSize, 0},
-      {kSmallOuterSize, 1},
-  });
-  size_t new_size = kSmallInnerSize / 2;
-  ASSERT_TRUE(allocator.Resize(test_fixture[0], new_size));
-  UseMemory(test_fixture[0], kSmallInnerSize / 2);
-}
-TEST_FOREACH_STRATEGY(ResizeSmallSmaller)
-
-template <typename TestFixtureType>
-void ResizeSmallLarger(TestFixtureType& test_fixture) {
-  auto& allocator = test_fixture.GetAllocator({
-      {kSmallOuterSize, 0},
-      {kSmallOuterSize, Preallocation::kIndexFree},
-      {kSmallOuterSize, 2},
-  });
-  size_t new_size = kSmallInnerSize * 2;
-  ASSERT_TRUE(allocator.Resize(test_fixture[0], new_size));
-  UseMemory(test_fixture[0], kSmallInnerSize * 2);
-}
-TEST_FOREACH_STRATEGY(ResizeSmallLarger)
-
-template <typename TestFixtureType>
-void ResizeSmallLargerFailure(TestFixtureType& test_fixture) {
-  using BlockType = typename TestFixtureType::BlockType;
-  auto& allocator = test_fixture.GetAllocator({
-      {kSmallOuterSize, 0},
-      {kSmallOuterSize, 1},
-  });
-  // Memory after ptr is already allocated, so `Resize` should fail.
-  size_t new_size = kSmallInnerSize * 2 + BlockType::kBlockOverhead;
-  EXPECT_FALSE(allocator.Resize(test_fixture[0], new_size));
-}
-TEST_FOREACH_STRATEGY(ResizeSmallLargerFailure)
-
-TEST(DualFirstFit, ResizeLargeSmallerAcrossThreshold) {
-  TestFixture<DualFirstFitBlockAllocator<OffsetType>> test_fixture;
-  auto& allocator = test_fixture.GetAllocator({{kDualFitThreshold * 2, 0}});
-  // Shrinking succeeds, and the pointer is unchanged even though it is now
-  // below the threshold.
-  size_t new_size = kDualFitThreshold / 2;
-  ASSERT_TRUE(allocator.Resize(test_fixture[0], new_size));
-  UseMemory(test_fixture[0], kDualFitThreshold / 2);
-}
-
-TEST(DualFirstFit, ResizeSmallLargerAcrossThreshold) {
-  TestFixture<DualFirstFitBlockAllocator<OffsetType>> test_fixture;
-  auto& allocator = test_fixture.GetAllocator({
-      {Preallocation::kSizeRemaining, Preallocation::kIndexNext},
-      {kDualFitThreshold / 2, 1},
-      {kDualFitThreshold * 2, Preallocation::kIndexFree},
-  });
-  // Growing succeeds, and the pointer is unchanged even though it is now
-  // above the threshold.
-  size_t new_size = kDualFitThreshold * 2;
-  ASSERT_TRUE(allocator.Resize(test_fixture[1], new_size));
-  UseMemory(test_fixture[1], kDualFitThreshold * 2);
-}
-
-template <typename TestFixtureType>
-void CanGetLayoutFromValidPointer(TestFixtureType& test_fixture) {
-  auto& allocator = test_fixture.GetAllocator();
-  constexpr size_t kAlignment = 64;
-  test_fixture[0] = allocator.Allocate(Layout(kLargeInnerSize, kAlignment * 2));
-  ASSERT_NE(test_fixture[0], nullptr);
-
-  test_fixture[1] = allocator.Allocate(Layout(kSmallInnerSize, kAlignment / 2));
-  ASSERT_NE(test_fixture[1], nullptr);
-
-  Result<Layout> result0 = allocator.GetLayout(test_fixture[0]);
-  ASSERT_EQ(result0.status(), OkStatus());
-  EXPECT_GE(result0->size(), kLargeInnerSize);
-  EXPECT_EQ(result0->alignment(), kAlignment * 2);
-
-  Result<Layout> result1 = allocator.GetLayout(test_fixture[1]);
-  ASSERT_EQ(result1.status(), OkStatus());
-  EXPECT_GE(result1->size(), kSmallInnerSize);
-  EXPECT_EQ(result1->alignment(), kAlignment / 2);
-}
-TEST_FOREACH_STRATEGY(CanGetLayoutFromValidPointer)
-
-template <typename TestFixtureType>
-void CannotGetLayoutFromInvalidPointer(TestFixtureType& test_fixture) {
-  auto& allocator = test_fixture.GetAllocator({
-      {kLargerOuterSize, 0},
-      {kLargeOuterSize, Preallocation::kIndexFree},
-      {kSmallOuterSize, 2},
-      {kSmallerOuterSize, Preallocation::kIndexFree},
-      {kSmallOuterSize, 4},
-      {kLargeOuterSize, Preallocation::kIndexFree},
-      {kLargerOuterSize, 6},
-  });
-
-  Result<Layout> result0 = allocator.GetLayout(nullptr);
-  EXPECT_EQ(result0.status(), Status::NotFound());
-
-  for (const auto& block : allocator.blocks()) {
-    if (!block->Used()) {
-      Result<Layout> result1 = allocator.GetLayout(block->UsableSpace());
-      EXPECT_EQ(result1.status(), Status::FailedPrecondition());
-    }
-  }
-}
-TEST_FOREACH_STRATEGY(CannotGetLayoutFromInvalidPointer)
-
-}  // namespace
-}  // namespace pw::allocator
diff --git a/pw_allocator/block_allocator_testing.cc b/pw_allocator/block_allocator_testing.cc
new file mode 100644
index 0000000..0d89b0e
--- /dev/null
+++ b/pw_allocator/block_allocator_testing.cc
@@ -0,0 +1,415 @@
+// Copyright 2024 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+#include "pw_allocator/block_allocator_testing.h"
+
+#include "pw_assert/check.h"
+#include "pw_bytes/alignment.h"
+#include "pw_status/status.h"
+
+namespace pw::allocator::test {
+
+// Test fixtures.
+
+BlockAllocatorTest::BlockAllocatorTest(BlockAllocatorType& allocator)
+    : ::testing::Test(), allocator_(allocator) {}
+
+void BlockAllocatorTest::SetUp() { ptrs_.fill(nullptr); }
+
+ByteSpan BlockAllocatorTest::GetBytes() {
+  return ByteSpan(buffer_.data(), buffer_.size());
+}
+
+BlockAllocatorTest::BlockAllocatorType& BlockAllocatorTest::GetAllocator() {
+  EXPECT_EQ(allocator_.Init(GetBytes()), OkStatus());
+  return allocator_;
+}
+
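+// Partitions the allocator's memory according to a list of `Preallocation`s.
+// For example, the following hypothetical call describes three blocks: an
+// allocated block cached at index 0, a free block, and an allocated block
+// spanning the remaining capacity cached at index 1:
+//
+//   GetAllocator({
+//       {kSmallOuterSize, 0},
+//       {kLargeOuterSize, Preallocation::kIndexFree},
+//       {Preallocation::kSizeRemaining, 1},
+//   });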
+BlockAllocatorTest::BlockAllocatorType& BlockAllocatorTest::GetAllocator(
+    std::initializer_list<Preallocation> preallocations) {
+  // First, check whether any blocks use kSizeRemaining, and calculate how
+  // large that block will be.
+  size_t remaining_outer_size = kCapacity;
+  for (auto& preallocation : preallocations) {
+    if (preallocation.outer_size != Preallocation::kSizeRemaining) {
+      size_t outer_size =
+          AlignUp(preallocation.outer_size, BlockType::kAlignment);
+      PW_CHECK_INT_GE(remaining_outer_size, outer_size);
+      remaining_outer_size -= outer_size;
+    }
+  }
+
+  Result<BlockType*> result = BlockType::Init(GetBytes());
+  PW_CHECK_OK(result.status());
+  BlockType* block = *result;
+  void* begin = block->UsableSpace();
+
+  // To prevent free blocks from being merged back into the block of available
+  // space, treat the available space as being used.
+  block->MarkUsed();
+
+  size_t next_index = 0;
+  for (auto& preallocation : preallocations) {
+    PW_CHECK_NOTNULL(block);
+
+    // Perform the allocation.
+    size_t outer_size = preallocation.outer_size;
+    if (outer_size == Preallocation::kSizeRemaining) {
+      outer_size = remaining_outer_size;
+      remaining_outer_size = 0;
+    }
+    size_t inner_size = outer_size - BlockType::kBlockOverhead;
+
+    block->MarkFree();
+    PW_CHECK_OK(BlockType::AllocFirst(block, inner_size, 1));
+    if (!block->Last()) {
+      block->Next()->MarkUsed();
+    }
+
+    // Free the block or cache the allocated pointer.
+    if (preallocation.index == Preallocation::kIndexFree) {
+      BlockType::Free(block);
+
+    } else if (preallocation.index == Preallocation::kIndexNext) {
+      while (true) {
+        PW_CHECK_INT_LT(next_index, kNumPtrs);
+        if (Fetch(next_index) == nullptr &&
+            std::all_of(preallocations.begin(),
+                        preallocations.end(),
+                        [next_index](const Preallocation& other) {
+                          return other.index != next_index;
+                        })) {
+          break;
+        }
+        ++next_index;
+      }
+      Store(next_index, block->UsableSpace());
+
+    } else {
+      Store(preallocation.index, block->UsableSpace());
+    }
+    block = block->Next();
+  }
+  if (block != nullptr) {
+    block->MarkFree();
+  }
+  block = BlockType::FromUsableSpace(begin);
+  PW_CHECK_OK(allocator_.Init(block));
+  return allocator_;
+}
+
+void* BlockAllocatorTest::NextAfter(size_t index) {
+  if (index >= ptrs_.size()) {
+    return nullptr;
+  }
+  auto* block = BlockType::FromUsableSpace(Fetch(index));
+  while (!block->Last()) {
+    block = block->Next();
+    if (block->Used()) {
+      return block->UsableSpace();
+    }
+  }
+  return nullptr;
+}
+
+void BlockAllocatorTest::Store(size_t index, void* ptr) { ptrs_[index] = ptr; }
+
+void* BlockAllocatorTest::Fetch(size_t index) { return ptrs_[index]; }
+
+void BlockAllocatorTest::UseMemory(void* ptr, size_t size) {
+  std::memset(ptr, 0x5a, size);
+}
+
+void BlockAllocatorTest::TearDown() {
+  for (size_t i = 0; i < kNumPtrs; ++i) {
+    if (Fetch(i) != nullptr) {
+      allocator_.Deallocate(Fetch(i));
+    }
+  }
+  allocator_.Reset();
+}
+
+// Unit tests.
+
+void BlockAllocatorTest::CanAutomaticallyInit(BlockAllocatorType& allocator) {
+  EXPECT_NE(*(allocator.blocks().begin()), nullptr);
+}
+
+void BlockAllocatorTest::CanExplicitlyInit(BlockAllocatorType& allocator) {
+  EXPECT_EQ(*(allocator.blocks().begin()), nullptr);
+  EXPECT_EQ(allocator.Init(GetBytes()), OkStatus());
+  EXPECT_NE(*(allocator.blocks().begin()), nullptr);
+}
+
+void BlockAllocatorTest::GetCapacity() {
+  auto& allocator = GetAllocator();
+  StatusWithSize capacity = allocator.GetCapacity();
+  EXPECT_EQ(capacity.status(), OkStatus());
+  EXPECT_EQ(capacity.size(), kCapacity);
+}
+
+void BlockAllocatorTest::AllocateLarge() {
+  auto& allocator = GetAllocator();
+  constexpr Layout layout = Layout::Of<std::byte[kLargeInnerSize]>();
+  Store(0, allocator.Allocate(layout));
+  ASSERT_NE(Fetch(0), nullptr);
+  ByteSpan bytes = GetBytes();
+  EXPECT_GE(Fetch(0), bytes.data());
+  EXPECT_LE(Fetch(0), bytes.data() + bytes.size());
+  UseMemory(Fetch(0), layout.size());
+}
+
+void BlockAllocatorTest::AllocateSmall() {
+  auto& allocator = GetAllocator();
+  constexpr Layout layout = Layout::Of<std::byte[kSmallInnerSize]>();
+  Store(0, allocator.Allocate(layout));
+  ASSERT_NE(Fetch(0), nullptr);
+  ByteSpan bytes = GetBytes();
+  EXPECT_GE(Fetch(0), bytes.data());
+  EXPECT_LE(Fetch(0), bytes.data() + bytes.size());
+  UseMemory(Fetch(0), layout.size());
+}
+
+void BlockAllocatorTest::AllocateTooLarge() {
+  auto& allocator = GetAllocator();
+  Store(0, allocator.Allocate(Layout::Of<std::byte[kCapacity * 2]>()));
+  EXPECT_EQ(Fetch(0), nullptr);
+}
+
+void BlockAllocatorTest::AllocateLargeAlignment() {
+  auto& allocator = GetAllocator();
+  constexpr size_t kAlignment = 64;
+  Store(0, allocator.Allocate(Layout(kLargeInnerSize, kAlignment)));
+  ASSERT_NE(Fetch(0), nullptr);
+  EXPECT_EQ(reinterpret_cast<uintptr_t>(Fetch(0)) % kAlignment, 0U);
+  UseMemory(Fetch(0), kLargeInnerSize);
+
+  Store(1, allocator.Allocate(Layout(kLargeInnerSize, kAlignment)));
+  ASSERT_NE(Fetch(1), nullptr);
+  EXPECT_EQ(reinterpret_cast<uintptr_t>(Fetch(1)) % kAlignment, 0U);
+  UseMemory(Fetch(1), kLargeInnerSize);
+}
+
+void BlockAllocatorTest::AllocateAlignmentFailure() {
+  // Allocate two blocks with an unaligned region between them.
+  constexpr size_t kAlignment = 128;
+  ByteSpan bytes = GetBytes();
+  auto addr = reinterpret_cast<uintptr_t>(bytes.data());
+  uintptr_t outer_size =
+      AlignUp(addr + BlockType::kBlockOverhead, kAlignment) - addr + 1;
+  auto& allocator = GetAllocator({
+      {outer_size, 0},
+      {kLargeOuterSize, Preallocation::kIndexFree},
+      {Preallocation::kSizeRemaining, 2},
+  });
+
+  // The allocator should be unable to create an aligned region.
+  Store(1, allocator.Allocate(Layout(kLargeInnerSize, kAlignment)));
+  EXPECT_EQ(Fetch(1), nullptr);
+}
+
+void BlockAllocatorTest::DeallocateNull() {
+  auto& allocator = GetAllocator();
+  allocator.Deallocate(nullptr);
+}
+
+void BlockAllocatorTest::DeallocateShuffled() {
+  auto& allocator = GetAllocator();
+  constexpr Layout layout = Layout::Of<std::byte[kSmallInnerSize]>();
+  for (size_t i = 0; i < kNumPtrs; ++i) {
+    Store(i, allocator.Allocate(layout));
+    if (Fetch(i) == nullptr) {
+      break;
+    }
+  }
+
+  // Mix up the order of allocations.
+  for (size_t i = 0; i < kNumPtrs; ++i) {
+    if (i % 2 == 0 && i + 1 < kNumPtrs) {
+      void* tmp = Fetch(i);
+      Store(i, Fetch(i + 1));
+      Store(i + 1, tmp);
+    }
+    if (i % 3 == 0 && i + 2 < kNumPtrs) {
+      void* tmp = Fetch(i);
+      Store(i, Fetch(i + 2));
+      Store(i + 2, tmp);
+    }
+  }
+
+  // Deallocate everything.
+  for (size_t i = 0; i < kNumPtrs; ++i) {
+    allocator.Deallocate(Fetch(i));
+    Store(i, nullptr);
+  }
+}
+
+void BlockAllocatorTest::IterateOverBlocks() {
+  auto& allocator = GetAllocator({
+      {kSmallOuterSize, Preallocation::kIndexFree},
+      {kLargeOuterSize, Preallocation::kIndexNext},
+      {kSmallOuterSize, Preallocation::kIndexFree},
+      {kLargeOuterSize, Preallocation::kIndexNext},
+      {kSmallOuterSize, Preallocation::kIndexFree},
+      {kLargeOuterSize, Preallocation::kIndexNext},
+      {Preallocation::kSizeRemaining, Preallocation::kIndexFree},
+  });
+
+  // Count the blocks. The unallocated ones vary in size, but the allocated
+  // ones should all be the same size.
+  size_t free_count = 0;
+  size_t used_count = 0;
+  for (auto* block : allocator.blocks()) {
+    if (block->Used()) {
+      EXPECT_EQ(block->InnerSize(), kLargeInnerSize);
+      ++used_count;
+    } else {
+      ++free_count;
+    }
+  }
+  EXPECT_EQ(used_count, 3U);
+  EXPECT_EQ(free_count, 4U);
+}
+
+void BlockAllocatorTest::ResizeNull() {
+  auto& allocator = GetAllocator();
+  size_t new_size = 1;
+  EXPECT_FALSE(allocator.Resize(nullptr, new_size));
+}
+
+void BlockAllocatorTest::ResizeLargeSame() {
+  auto& allocator = GetAllocator({
+      {kLargeOuterSize, 0},
+      {kLargeOuterSize, 1},
+  });
+  size_t new_size = kLargeInnerSize;
+  ASSERT_TRUE(allocator.Resize(Fetch(0), new_size));
+  UseMemory(Fetch(0), kLargeInnerSize);
+}
+
+void BlockAllocatorTest::ResizeLargeSmaller() {
+  auto& allocator = GetAllocator({
+      {kLargeOuterSize, 0},
+      {kLargeOuterSize, 1},
+  });
+  size_t new_size = kSmallInnerSize;
+  ASSERT_TRUE(allocator.Resize(Fetch(0), new_size));
+  UseMemory(Fetch(0), kSmallInnerSize);
+}
+
+void BlockAllocatorTest::ResizeLargeLarger() {
+  auto& allocator = GetAllocator({
+      {kLargeOuterSize, 0},
+      {kLargeOuterSize, Preallocation::kIndexFree},
+      {kSmallOuterSize, 2},
+  });
+  size_t new_size = kLargeInnerSize * 2;
+  ASSERT_TRUE(allocator.Resize(Fetch(0), new_size));
+  UseMemory(Fetch(0), kLargeInnerSize * 2);
+}
+
+void BlockAllocatorTest::ResizeLargeLargerFailure() {
+  auto& allocator = GetAllocator({
+      {kLargeOuterSize, 0},
+      {kSmallOuterSize, 12},
+  });
+  // Memory after ptr is already allocated, so `Resize` should fail.
+  size_t new_size = kLargeInnerSize * 2;
+  EXPECT_FALSE(allocator.Resize(Fetch(0), new_size));
+}
+
+void BlockAllocatorTest::ResizeSmallSame() {
+  auto& allocator = GetAllocator({
+      {kSmallOuterSize, 0},
+      {kSmallOuterSize, 1},
+  });
+  size_t new_size = kSmallInnerSize;
+  ASSERT_TRUE(allocator.Resize(Fetch(0), new_size));
+  UseMemory(Fetch(0), kSmallInnerSize);
+}
+
+void BlockAllocatorTest::ResizeSmallSmaller() {
+  auto& allocator = GetAllocator({
+      {kSmallOuterSize, 0},
+      {kSmallOuterSize, 1},
+  });
+  size_t new_size = kSmallInnerSize / 2;
+  ASSERT_TRUE(allocator.Resize(Fetch(0), new_size));
+  UseMemory(Fetch(0), kSmallInnerSize / 2);
+}
+
+void BlockAllocatorTest::ResizeSmallLarger() {
+  auto& allocator = GetAllocator({
+      {kSmallOuterSize, 0},
+      {kSmallOuterSize, Preallocation::kIndexFree},
+      {kSmallOuterSize, 2},
+  });
+  size_t new_size = kSmallInnerSize * 2;
+  ASSERT_TRUE(allocator.Resize(Fetch(0), new_size));
+  UseMemory(Fetch(0), kSmallInnerSize * 2);
+}
+
+void BlockAllocatorTest::ResizeSmallLargerFailure() {
+  auto& allocator = GetAllocator({
+      {kSmallOuterSize, 0},
+      {kSmallOuterSize, 1},
+  });
+  // Memory after ptr is already allocated, so `Resize` should fail.
+  size_t new_size = kSmallInnerSize * 2 + BlockType::kBlockOverhead;
+  EXPECT_FALSE(allocator.Resize(Fetch(0), new_size));
+}
+
+void BlockAllocatorTest::CanGetLayoutFromValidPointer() {
+  auto& allocator = GetAllocator();
+  constexpr size_t kAlignment = 64;
+  Store(0, allocator.Allocate(Layout(kLargeInnerSize, kAlignment * 2)));
+  ASSERT_NE(Fetch(0), nullptr);
+
+  Store(1, allocator.Allocate(Layout(kSmallInnerSize, kAlignment / 2)));
+  ASSERT_NE(Fetch(1), nullptr);
+
+  Result<Layout> result0 = allocator.GetLayout(Fetch(0));
+  ASSERT_EQ(result0.status(), OkStatus());
+  EXPECT_GE(result0->size(), kLargeInnerSize);
+  EXPECT_EQ(result0->alignment(), kAlignment * 2);
+
+  Result<Layout> result1 = allocator.GetLayout(Fetch(1));
+  ASSERT_EQ(result1.status(), OkStatus());
+  EXPECT_GE(result1->size(), kSmallInnerSize);
+  EXPECT_EQ(result1->alignment(), kAlignment / 2);
+}
+
+void BlockAllocatorTest::CannotGetLayoutFromInvalidPointer() {
+  auto& allocator = GetAllocator({
+      {kLargerOuterSize, 0},
+      {kLargeOuterSize, Preallocation::kIndexFree},
+      {kSmallOuterSize, 2},
+      {kSmallerOuterSize, Preallocation::kIndexFree},
+      {kSmallOuterSize, 4},
+      {kLargeOuterSize, Preallocation::kIndexFree},
+      {kLargerOuterSize, 6},
+  });
+
+  Result<Layout> result0 = allocator.GetLayout(nullptr);
+  EXPECT_EQ(result0.status(), Status::NotFound());
+
+  for (const auto& block : allocator.blocks()) {
+    if (!block->Used()) {
+      Result<Layout> result1 = allocator.GetLayout(block->UsableSpace());
+      EXPECT_EQ(result1.status(), Status::FailedPrecondition());
+    }
+  }
+}
+
+}  // namespace pw::allocator::test
diff --git a/pw_allocator/dual_first_fit_block_allocator_test.cc b/pw_allocator/dual_first_fit_block_allocator_test.cc
new file mode 100644
index 0000000..ed72c9b
--- /dev/null
+++ b/pw_allocator/dual_first_fit_block_allocator_test.cc
@@ -0,0 +1,167 @@
+// Copyright 2024 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+#include "pw_allocator/dual_first_fit_block_allocator.h"
+
+#include "pw_allocator/block_allocator_testing.h"
+#include "pw_unit_test/framework.h"
+
+namespace pw::allocator {
+namespace {
+
+// Test fixtures.
+
+using BlockAllocatorTest = test::BlockAllocatorTest;
+using Preallocation = test::Preallocation;
+using DualFirstFitBlockAllocatorType =
+    DualFirstFitBlockAllocator<BlockAllocatorTest::OffsetType>;
+
+// Minimum size of a "large" allocation; allocations smaller than this size
+// are considered "small" when using the DualFirstFit strategy.
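+// For example, with a hypothetical threshold of 16 bytes, a request for 8
+// bytes is handled last-fit (searching backwards from the end of the block
+// range), while a request for 32 bytes is handled first-fit (searching
+// forwards from the beginning).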
+static constexpr size_t kDualFitThreshold =
+    BlockAllocatorTest::kSmallInnerSize * 2;
+
+class DualFirstFitBlockAllocatorTest : public BlockAllocatorTest {
+ public:
+  DualFirstFitBlockAllocatorTest() : BlockAllocatorTest(allocator_) {}
+
+ private:
+  DualFirstFitBlockAllocatorType allocator_;
+};
+
+// Unit tests.
+
+TEST_F(DualFirstFitBlockAllocatorTest, CanAutomaticallyInit) {
+  DualFirstFitBlockAllocatorType allocator(GetBytes(), kDualFitThreshold);
+  CanAutomaticallyInit(allocator);
+}
+
+TEST_F(DualFirstFitBlockAllocatorTest, CanExplicitlyInit) {
+  DualFirstFitBlockAllocatorType allocator;
+  CanExplicitlyInit(allocator);
+}
+
+TEST_F(DualFirstFitBlockAllocatorTest, GetCapacity) { GetCapacity(); }
+
+TEST_F(DualFirstFitBlockAllocatorTest, AllocateLarge) { AllocateLarge(); }
+
+TEST_F(DualFirstFitBlockAllocatorTest, AllocateSmall) { AllocateSmall(); }
+
+TEST_F(DualFirstFitBlockAllocatorTest, AllocateLargeAlignment) {
+  AllocateLargeAlignment();
+}
+
+TEST_F(DualFirstFitBlockAllocatorTest, AllocateAlignmentFailure) {
+  AllocateAlignmentFailure();
+}
+
+TEST_F(DualFirstFitBlockAllocatorTest, AllocatesUsingThreshold) {
+  auto& allocator = GetAllocator({
+      {kLargerOuterSize, Preallocation::kIndexFree},
+      {kSmallerOuterSize, 1},
+      {kSmallOuterSize, Preallocation::kIndexFree},
+      {Preallocation::kSizeRemaining, 3},
+      {kLargeOuterSize, Preallocation::kIndexFree},
+      {kSmallerOuterSize, 5},
+      {kSmallOuterSize, Preallocation::kIndexFree},
+  });
+  auto& dual_first_fit_block_allocator =
+      static_cast<DualFirstFitBlockAllocatorType&>(allocator);
+  dual_first_fit_block_allocator.set_threshold(kDualFitThreshold);
+
+  Store(0, allocator.Allocate(Layout(kLargeInnerSize, 1)));
+  EXPECT_EQ(NextAfter(0), Fetch(1));
+  Store(4, allocator.Allocate(Layout(kLargeInnerSize, 1)));
+  EXPECT_EQ(NextAfter(3), Fetch(4));
+  EXPECT_EQ(NextAfter(4), Fetch(5));
+  Store(6, allocator.Allocate(Layout(kSmallInnerSize, 1)));
+  EXPECT_EQ(NextAfter(5), Fetch(6));
+  EXPECT_EQ(NextAfter(6), Fetch(7));
+  Store(2, allocator.Allocate(Layout(kSmallInnerSize, 1)));
+  EXPECT_EQ(NextAfter(1), Fetch(2));
+  EXPECT_EQ(NextAfter(2), Fetch(3));
+}
+
+TEST_F(DualFirstFitBlockAllocatorTest, DeallocateNull) { DeallocateNull(); }
+
+TEST_F(DualFirstFitBlockAllocatorTest, DeallocateShuffled) {
+  DeallocateShuffled();
+}
+
+TEST_F(DualFirstFitBlockAllocatorTest, IterateOverBlocks) {
+  IterateOverBlocks();
+}
+
+TEST_F(DualFirstFitBlockAllocatorTest, ResizeNull) { ResizeNull(); }
+
+TEST_F(DualFirstFitBlockAllocatorTest, ResizeLargeSame) { ResizeLargeSame(); }
+
+TEST_F(DualFirstFitBlockAllocatorTest, ResizeLargeSmaller) {
+  ResizeLargeSmaller();
+}
+
+TEST_F(DualFirstFitBlockAllocatorTest, ResizeLargeLarger) {
+  ResizeLargeLarger();
+}
+
+TEST_F(DualFirstFitBlockAllocatorTest, ResizeLargeLargerFailure) {
+  ResizeLargeLargerFailure();
+}
+
+TEST_F(DualFirstFitBlockAllocatorTest, ResizeSmallSame) { ResizeSmallSame(); }
+
+TEST_F(DualFirstFitBlockAllocatorTest, ResizeSmallSmaller) {
+  ResizeSmallSmaller();
+}
+
+TEST_F(DualFirstFitBlockAllocatorTest, ResizeSmallLarger) {
+  ResizeSmallLarger();
+}
+
+TEST_F(DualFirstFitBlockAllocatorTest, ResizeSmallLargerFailure) {
+  ResizeSmallLargerFailure();
+}
+
+TEST_F(DualFirstFitBlockAllocatorTest, CanGetLayoutFromValidPointer) {
+  CanGetLayoutFromValidPointer();
+}
+
+TEST_F(DualFirstFitBlockAllocatorTest, CannotGetLayoutFromInvalidPointer) {
+  CannotGetLayoutFromInvalidPointer();
+}
+
+TEST_F(DualFirstFitBlockAllocatorTest, ResizeLargeSmallerAcrossThreshold) {
+  auto& allocator = GetAllocator({{kDualFitThreshold * 2, 0}});
+  // Shrinking succeeds, and the pointer is unchanged even though it is now
+  // below the threshold.
+  size_t new_size = kDualFitThreshold / 2;
+  ASSERT_TRUE(allocator.Resize(Fetch(0), new_size));
+  UseMemory(Fetch(0), kDualFitThreshold / 2);
+}
+
+TEST_F(DualFirstFitBlockAllocatorTest, ResizeSmallLargerAcrossThreshold) {
+  auto& allocator = GetAllocator({
+      {Preallocation::kSizeRemaining, Preallocation::kIndexNext},
+      {kDualFitThreshold / 2, 1},
+      {kDualFitThreshold * 2, Preallocation::kIndexFree},
+  });
+  // Growing succeeds, and the pointer is unchanged even though it is now
+  // above the threshold.
+  size_t new_size = kDualFitThreshold * 2;
+  ASSERT_TRUE(allocator.Resize(Fetch(1), new_size));
+  UseMemory(Fetch(1), kDualFitThreshold * 2);
+}
+
+}  // namespace
+}  // namespace pw::allocator
diff --git a/pw_allocator/first_fit_block_allocator_test.cc b/pw_allocator/first_fit_block_allocator_test.cc
new file mode 100644
index 0000000..2773c37
--- /dev/null
+++ b/pw_allocator/first_fit_block_allocator_test.cc
@@ -0,0 +1,208 @@
+// Copyright 2024 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+#include "pw_allocator/first_fit_block_allocator.h"
+
+#include <array>
+#include <cstdint>
+#include <new>
+
+#include "pw_allocator/block_allocator_testing.h"
+#include "pw_allocator/buffer.h"
+#include "pw_unit_test/framework.h"
+
+namespace pw::allocator {
+namespace {
+
+using BlockAllocatorTest = test::BlockAllocatorTest;
+using Preallocation = test::Preallocation;
+using FirstFitBlockAllocatorType =
+    FirstFitBlockAllocator<BlockAllocatorTest::OffsetType>;
+
+class FirstFitBlockAllocatorTest : public BlockAllocatorTest {
+ public:
+  FirstFitBlockAllocatorTest() : BlockAllocatorTest(allocator_) {}
+
+ private:
+  FirstFitBlockAllocatorType allocator_;
+};
+
+TEST_F(FirstFitBlockAllocatorTest, CanAutomaticallyInit) {
+  FirstFitBlockAllocatorType allocator(GetBytes());
+  CanAutomaticallyInit(allocator);
+}
+
+TEST_F(FirstFitBlockAllocatorTest, CanExplicitlyInit) {
+  FirstFitBlockAllocatorType allocator;
+  CanExplicitlyInit(allocator);
+}
+
+TEST_F(FirstFitBlockAllocatorTest, GetCapacity) { GetCapacity(); }
+
+TEST_F(FirstFitBlockAllocatorTest, AllocateLarge) { AllocateLarge(); }
+
+TEST_F(FirstFitBlockAllocatorTest, AllocateSmall) { AllocateSmall(); }
+
+TEST_F(FirstFitBlockAllocatorTest, AllocateLargeAlignment) {
+  AllocateLargeAlignment();
+}
+
+TEST_F(FirstFitBlockAllocatorTest, AllocateAlignmentFailure) {
+  AllocateAlignmentFailure();
+}
+
+TEST_F(FirstFitBlockAllocatorTest, AllocatesFirstCompatible) {
+  auto& allocator = GetAllocator({
+      {kSmallOuterSize, Preallocation::kIndexFree},
+      {kSmallerOuterSize, 1},
+      {kSmallOuterSize, Preallocation::kIndexFree},
+      {kSmallerOuterSize, 3},
+      {kLargeOuterSize, Preallocation::kIndexFree},
+      {Preallocation::kSizeRemaining, 5},
+  });
+
+  Store(0, allocator.Allocate(Layout(kSmallInnerSize, 1)));
+  EXPECT_EQ(NextAfter(0), Fetch(1));
+  Store(4, allocator.Allocate(Layout(kLargeInnerSize, 1)));
+  EXPECT_EQ(NextAfter(3), Fetch(4));
+  EXPECT_EQ(NextAfter(4), Fetch(5));
+}
+
+TEST_F(FirstFitBlockAllocatorTest, DeallocateNull) { DeallocateNull(); }
+
+TEST_F(FirstFitBlockAllocatorTest, DeallocateShuffled) { DeallocateShuffled(); }
+
+TEST_F(FirstFitBlockAllocatorTest, IterateOverBlocks) { IterateOverBlocks(); }
+
+TEST_F(FirstFitBlockAllocatorTest, ResizeNull) { ResizeNull(); }
+
+TEST_F(FirstFitBlockAllocatorTest, ResizeLargeSame) { ResizeLargeSame(); }
+
+TEST_F(FirstFitBlockAllocatorTest, ResizeLargeSmaller) { ResizeLargeSmaller(); }
+
+TEST_F(FirstFitBlockAllocatorTest, ResizeLargeLarger) { ResizeLargeLarger(); }
+
+TEST_F(FirstFitBlockAllocatorTest, ResizeLargeLargerFailure) {
+  ResizeLargeLargerFailure();
+}
+
+TEST_F(FirstFitBlockAllocatorTest, ResizeSmallSame) { ResizeSmallSame(); }
+
+TEST_F(FirstFitBlockAllocatorTest, ResizeSmallSmaller) { ResizeSmallSmaller(); }
+
+TEST_F(FirstFitBlockAllocatorTest, ResizeSmallLarger) { ResizeSmallLarger(); }
+
+TEST_F(FirstFitBlockAllocatorTest, ResizeSmallLargerFailure) {
+  ResizeSmallLargerFailure();
+}
+
+TEST_F(FirstFitBlockAllocatorTest, CanGetLayoutFromValidPointer) {
+  CanGetLayoutFromValidPointer();
+}
+
+TEST_F(FirstFitBlockAllocatorTest, CannotGetLayoutFromInvalidPointer) {
+  CannotGetLayoutFromInvalidPointer();
+}
+
+TEST_F(FirstFitBlockAllocatorTest, DisablePoisoning) {
+  auto& allocator = GetAllocator();
+  constexpr Layout layout = Layout::Of<std::byte[kSmallInnerSize]>();
+
+  // Allocate 3 blocks to prevent the middle one from being merged when freed.
+  std::array<void*, 3> ptrs;
+  for (auto& ptr : ptrs) {
+    ptr = allocator.Allocate(layout);
+    ASSERT_NE(ptr, nullptr);
+  }
+
+  // Modify the contents of the block and check if it is still valid.
+  auto* bytes = std::launder(reinterpret_cast<uint8_t*>(ptrs[1]));
+  auto* block = BlockType::FromUsableSpace(bytes);
+  allocator.Deallocate(bytes);
+  EXPECT_FALSE(block->Used());
+  EXPECT_TRUE(block->IsValid());
+  bytes[0] = ~bytes[0];
+  EXPECT_TRUE(block->IsValid());
+
+  allocator.Deallocate(ptrs[0]);
+  allocator.Deallocate(ptrs[2]);
+}
+
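+// The tests below exercise the second template parameter of
+// `FirstFitBlockAllocator`, which sets the poison interval: with an interval
+// of N, every Nth freed block has its usable space filled with a poison
+// pattern, and `IsValid` later fails if that pattern has been disturbed.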
+TEST(PoisonedFirstFitBlockAllocatorTest, PoisonEveryFreeBlock) {
+  using PoisonedFirstFitBlockAllocator = FirstFitBlockAllocator<uintptr_t, 1>;
+  using BlockType = PoisonedFirstFitBlockAllocator::BlockType;
+
+  WithBuffer<PoisonedFirstFitBlockAllocator,
+             FirstFitBlockAllocatorTest::kCapacity>
+      allocator;
+  EXPECT_EQ(allocator->Init(allocator.as_bytes()), OkStatus());
+  constexpr Layout layout =
+      Layout::Of<std::byte[FirstFitBlockAllocatorTest::kSmallInnerSize]>();
+
+  // Allocate 3 blocks to prevent the middle one from being merged when freed.
+  std::array<void*, 3> ptrs;
+  for (auto& ptr : ptrs) {
+    ptr = allocator->Allocate(layout);
+    ASSERT_NE(ptr, nullptr);
+  }
+  // Modify the contents of the block and check if it is still valid.
+  auto* bytes = std::launder(reinterpret_cast<uint8_t*>(ptrs[1]));
+  auto* block = BlockType::FromUsableSpace(bytes);
+  allocator->Deallocate(bytes);
+  EXPECT_FALSE(block->Used());
+  EXPECT_TRUE(block->IsValid());
+  bytes[0] = ~bytes[0];
+  EXPECT_FALSE(block->IsValid());
+
+  allocator->Deallocate(ptrs[0]);
+  allocator->Deallocate(ptrs[2]);
+}
+
+TEST(PoisonedFirstFitBlockAllocatorTest, PoisonPeriodically) {
+  using PoisonedFirstFitBlockAllocator = FirstFitBlockAllocator<uintptr_t, 4>;
+  using BlockType = PoisonedFirstFitBlockAllocator::BlockType;
+
+  WithBuffer<PoisonedFirstFitBlockAllocator,
+             FirstFitBlockAllocatorTest::kCapacity>
+      allocator;
+  EXPECT_EQ(allocator->Init(allocator.as_bytes()), OkStatus());
+  constexpr Layout layout =
+      Layout::Of<std::byte[FirstFitBlockAllocatorTest::kSmallInnerSize]>();
+
+  // Allocate 9 blocks to prevent every other from being merged when freed.
+  std::array<void*, 9> ptrs;
+  for (auto& ptr : ptrs) {
+    ptr = allocator->Allocate(layout);
+    ASSERT_NE(ptr, nullptr);
+  }
+
+  for (size_t i = 1; i < ptrs.size(); i += 2) {
+    auto* bytes = std::launder(reinterpret_cast<uint8_t*>(ptrs[i]));
+    auto* block = BlockType::FromUsableSpace(bytes);
+    allocator->Deallocate(bytes);
+    EXPECT_FALSE(block->Used());
+    EXPECT_TRUE(block->IsValid());
+    bytes[0] = ~bytes[0];
+
+    // Corruption is only detected on the fourth freed block.
+    if (i == 7) {
+      EXPECT_FALSE(block->IsValid());
+    } else {
+      EXPECT_TRUE(block->IsValid());
+    }
+  }
+
+  for (size_t i = 0; i < ptrs.size(); i += 2) {
+    allocator->Deallocate(ptrs[i]);
+  }
+}
+
+}  // namespace
+}  // namespace pw::allocator
diff --git a/pw_allocator/last_fit_block_allocator_test.cc b/pw_allocator/last_fit_block_allocator_test.cc
new file mode 100644
index 0000000..54d22cf
--- /dev/null
+++ b/pw_allocator/last_fit_block_allocator_test.cc
@@ -0,0 +1,118 @@
+// Copyright 2024 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+#include "pw_allocator/last_fit_block_allocator.h"
+
+#include "pw_allocator/block_allocator_testing.h"
+#include "pw_unit_test/framework.h"
+
+namespace pw::allocator {
+namespace {
+
+// Test fixtures.
+
+using BlockAllocatorTest = test::BlockAllocatorTest;
+using Preallocation = test::Preallocation;
+using LastFitBlockAllocatorType =
+    LastFitBlockAllocator<BlockAllocatorTest::OffsetType>;
+
+class LastFitBlockAllocatorTest : public BlockAllocatorTest {
+ public:
+  LastFitBlockAllocatorTest() : BlockAllocatorTest(allocator_) {}
+
+ private:
+  LastFitBlockAllocatorType allocator_;
+};
+
+// Unit tests.
+
+TEST_F(LastFitBlockAllocatorTest, CanAutomaticallyInit) {
+  LastFitBlockAllocatorType allocator(GetBytes());
+  CanAutomaticallyInit(allocator);
+}
+
+TEST_F(LastFitBlockAllocatorTest, CanExplicitlyInit) {
+  LastFitBlockAllocatorType allocator;
+  CanExplicitlyInit(allocator);
+}
+
+TEST_F(LastFitBlockAllocatorTest, GetCapacity) { GetCapacity(); }
+
+TEST_F(LastFitBlockAllocatorTest, AllocateLarge) { AllocateLarge(); }
+
+TEST_F(LastFitBlockAllocatorTest, AllocateSmall) { AllocateSmall(); }
+
+TEST_F(LastFitBlockAllocatorTest, AllocateLargeAlignment) {
+  AllocateLargeAlignment();
+}
+
+TEST_F(LastFitBlockAllocatorTest, AllocateAlignmentFailure) {
+  AllocateAlignmentFailure();
+}
+
+TEST_F(LastFitBlockAllocatorTest, AllocatesLastCompatible) {
+  auto& allocator = GetAllocator({
+      {kLargeOuterSize, Preallocation::kIndexFree},
+      {kSmallerOuterSize, 1},
+      {kSmallOuterSize, Preallocation::kIndexFree},
+      {kSmallerOuterSize, 3},
+      {kSmallOuterSize, Preallocation::kIndexFree},
+      {Preallocation::kSizeRemaining, 5},
+  });
+
+  Store(0, allocator.Allocate(Layout(kLargeInnerSize, 1)));
+  EXPECT_EQ(NextAfter(0), Fetch(1));
+  Store(4, allocator.Allocate(Layout(kSmallInnerSize, 1)));
+  EXPECT_EQ(NextAfter(3), Fetch(4));
+  EXPECT_EQ(NextAfter(4), Fetch(5));
+}
+
+TEST_F(LastFitBlockAllocatorTest, DeallocateNull) { DeallocateNull(); }
+
+TEST_F(LastFitBlockAllocatorTest, DeallocateShuffled) { DeallocateShuffled(); }
+
+TEST_F(LastFitBlockAllocatorTest, IterateOverBlocks) { IterateOverBlocks(); }
+
+TEST_F(LastFitBlockAllocatorTest, ResizeNull) { ResizeNull(); }
+
+TEST_F(LastFitBlockAllocatorTest, ResizeLargeSame) { ResizeLargeSame(); }
+
+TEST_F(LastFitBlockAllocatorTest, ResizeLargeSmaller) { ResizeLargeSmaller(); }
+
+TEST_F(LastFitBlockAllocatorTest, ResizeLargeLarger) { ResizeLargeLarger(); }
+
+TEST_F(LastFitBlockAllocatorTest, ResizeLargeLargerFailure) {
+  ResizeLargeLargerFailure();
+}
+
+TEST_F(LastFitBlockAllocatorTest, ResizeSmallSame) { ResizeSmallSame(); }
+
+TEST_F(LastFitBlockAllocatorTest, ResizeSmallSmaller) { ResizeSmallSmaller(); }
+
+TEST_F(LastFitBlockAllocatorTest, ResizeSmallLarger) { ResizeSmallLarger(); }
+
+TEST_F(LastFitBlockAllocatorTest, ResizeSmallLargerFailure) {
+  ResizeSmallLargerFailure();
+}
+
+TEST_F(LastFitBlockAllocatorTest, CanGetLayoutFromValidPointer) {
+  CanGetLayoutFromValidPointer();
+}
+
+TEST_F(LastFitBlockAllocatorTest, CannotGetLayoutFromInvalidPointer) {
+  CannotGetLayoutFromInvalidPointer();
+}
+
+}  // namespace
+}  // namespace pw::allocator
diff --git a/pw_allocator/public/pw_allocator/best_fit_block_allocator.h b/pw_allocator/public/pw_allocator/best_fit_block_allocator.h
new file mode 100644
index 0000000..ad7452b
--- /dev/null
+++ b/pw_allocator/public/pw_allocator/best_fit_block_allocator.h
@@ -0,0 +1,62 @@
+// Copyright 2024 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+#pragma once
+
+#include <cstddef>
+#include <cstdint>
+
+#include "pw_allocator/block_allocator_base.h"
+
+namespace pw::allocator {
+
+/// Block allocator that uses a "best-fit" allocation strategy.
+///
+/// In this strategy, the allocator handles an allocation request by looking at
+/// all unused blocks and finding the smallest one which can satisfy the
+/// request.
+///
+/// This algorithm may make better use of available memory by wasting less on
+/// unused fragments, but may also lead to worse fragmentation as those
+/// fragments are more likely to be too small to be useful to other requests.
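+///
+/// A minimal usage sketch, assuming `buffer` is a caller-provided
+/// `std::array<std::byte, N>` or similar region of memory:
+///
+/// @code{.cpp}
+///   BestFitBlockAllocator<> allocator(ByteSpan(buffer));
+///   void* ptr = allocator.Allocate(Layout(64, 8));
+///   if (ptr != nullptr) {
+///     allocator.Deallocate(ptr);
+///   }
+/// @endcode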
+template <typename OffsetType = uintptr_t,
+          size_t kPoisonInterval = 0,
+          size_t kAlign = alignof(OffsetType)>
+class BestFitBlockAllocator
+    : public BlockAllocator<OffsetType, kPoisonInterval, kAlign> {
+ public:
+  using Base = BlockAllocator<OffsetType, kPoisonInterval, kAlign>;
+  using BlockType = typename Base::BlockType;
+
+  constexpr BestFitBlockAllocator() : Base() {}
+  explicit BestFitBlockAllocator(ByteSpan region) : Base(region) {}
+
+ private:
+  /// @copydoc BlockAllocator::ChooseBlock
+  BlockType* ChooseBlock(Layout layout) override {
+    // Search backwards for the smallest block that can hold this allocation.
+    BlockType* best = nullptr;
+    for (auto* block : Base::rblocks()) {
+      if (!block->CanAllocLast(layout.size(), layout.alignment()).ok()) {
+        continue;
+      }
+      if (best == nullptr || block->OuterSize() < best->OuterSize()) {
+        best = block;
+      }
+    }
+    if (best != nullptr &&
+        BlockType::AllocLast(best, layout.size(), layout.alignment()).ok()) {
+      return best;
+    }
+    return nullptr;
+  }
+};
+
+}  // namespace pw::allocator
diff --git a/pw_allocator/public/pw_allocator/block_allocator.h b/pw_allocator/public/pw_allocator/block_allocator.h
index 5d2d602..56322fa 100644
--- a/pw_allocator/public/pw_allocator/block_allocator.h
+++ b/pw_allocator/public/pw_allocator/block_allocator.h
@@ -13,584 +13,13 @@
 // the License.
 #pragma once
 
-#include <cstddef>
-#include <cstdint>
+// This file exists merely to allow block allocator users to migrate to
+// including specific block allocator header files directly. New users should
+// not use this header, and it will eventually be deprecated.
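+//
+// For example, code that previously included this header for
+// `FirstFitBlockAllocator` should now include
+// "pw_allocator/first_fit_block_allocator.h" instead.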
 
-#include "pw_allocator/allocator.h"
-#include "pw_allocator/block.h"
-#include "pw_allocator/capability.h"
-#include "pw_bytes/span.h"
-#include "pw_result/result.h"
-#include "pw_status/status.h"
-
-namespace pw::allocator {
-namespace internal {
-
-/// Block-independent base class of ``BlockAllocator``.
-///
-/// This class contains static methods which do not depend on the template
-/// parameters of ``BlockAllocator`` that are used to determine block/ type.
-/// This allows the methods to be defined in a separate source file and use
-/// macros that cannot be used in headers, e.g. PW_CHECK.
-///
-/// This class should not be used directly. Instead, use ``BlockAllocator`` or
-/// one of its specializations.
-class GenericBlockAllocator : public Allocator {
- public:
-  static constexpr Capabilities kCapabilities = kImplementsGetUsableLayout |
-                                                kImplementsGetAllocatedLayout |
-                                                kImplementsQuery;
-
-  // Not copyable or movable.
-  GenericBlockAllocator(const GenericBlockAllocator&) = delete;
-  GenericBlockAllocator& operator=(const GenericBlockAllocator&) = delete;
-  GenericBlockAllocator(GenericBlockAllocator&&) = delete;
-  GenericBlockAllocator& operator=(GenericBlockAllocator&&) = delete;
-
- protected:
-  constexpr GenericBlockAllocator() : Allocator(kCapabilities) {}
-
-  /// Crashes with an informational method that the given block is allocated.
-  ///
-  /// This method is meant to be called by ``SplitFreeListAllocator``s
-  /// destructor. There must not be any outstanding allocations from an
-  /// when it is destroyed.
-  static void CrashOnAllocated(void* allocated);
-};
-
-/// A memory allocator that uses a list of blocks.
-///
-/// This class does not implement `ChooseBlock` and cannot be used directly.
-/// Instead, use one of its specializations.
-///
-/// NOTE!! Do NOT use memory returned from this allocator as the backing for
-/// another allocator. If this is done, the `Query` method may incorrectly
-/// think pointers returned by that allocator were created by this one, and
-/// report that this allocator can de/reallocate them.
-template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
-class BlockAllocator : public GenericBlockAllocator {
- public:
-  using BlockType = Block<OffsetType, kAlign, kPoisonInterval != 0>;
-  using Range = typename BlockType::Range;
-
-  /// Constexpr constructor. Callers must explicitly call `Init`.
-  constexpr BlockAllocator() : GenericBlockAllocator() {}
-
-  /// Non-constexpr constructor that automatically calls `Init`.
-  ///
-  /// Errors are fatal.
-  ///
-  /// @param[in]  region              The memory region for this allocator.
-  explicit BlockAllocator(ByteSpan region) : BlockAllocator() {
-    PW_ASSERT(Init(region).ok());
-  }
-
-  ~BlockAllocator() override { Reset(); }
-
-  /// Returns a ``Range`` of blocks tracking the memory of this allocator.
-  Range blocks() const;
-
-  /// Sets the memory region to be used by this allocator.
-  ///
-  /// This method will instantiate an initial block using the memory region.
-  ///
-  /// @param[in]  region              The memory region for this allocator.
-  ///
-  /// @returns @rst
-  ///
-  /// .. pw-status-codes::
-  ///
-  ///    OK: The allocator is initialized.
-  ///
-  ///    INVALID_ARGUMENT: The memory region is null.
-  ///
-  ///    RESOURCE_EXHAUSTED: The region is too small for ``BlockType``.
-  ///
-  ///    OUT_OF_RANGE: The region too large for ``BlockType``.
-  ///
-  /// @endrst
-  Status Init(ByteSpan region);
-
-  /// Sets the blocks to be used by this allocator.
-  ///
-  /// This method will use the sequence blocks as-is, which must be valid. If
-  /// `end` is not provided, the sequence extends to a block marked "last".
-  ///
-  /// @param[in]  region              The memory region for this allocator.
-  ///
-  /// @returns @rst
-  ///
-  /// .. pw-status-codes::
-  ///
-  ///    OK: The allocator is initialized.
-  ///
-  ///    INVALID_ARGUMENT: The block sequence is empty.
-  ///
-  /// @endrst
-  Status Init(BlockType* begin, BlockType* end = nullptr);
-
-  /// Initializes the allocator with preconfigured blocks.
-  ///
-  /// This method does not
-
-  /// Resets the allocator to an uninitialized state.
-  ///
-  /// At the time of the call, there MUST NOT be any outstanding allocated
-  /// blocks from this allocator.
-  void Reset();
-
- protected:
-  using ReverseRange = typename BlockType::ReverseRange;
-
-  /// Returns a ``ReverseRange`` of blocks tracking the memory of this
-  /// allocator.
-  ReverseRange rblocks();
-
- private:
-  /// @copydoc Allocator::Allocate
-  void* DoAllocate(Layout layout) override;
-
-  /// @copydoc Allocator::Deallocate
-  void DoDeallocate(void* ptr) override;
-
-  /// @copydoc Allocator::Deallocate
-  void DoDeallocate(void* ptr, Layout) override { DoDeallocate(ptr); }
-
-  /// @copydoc Allocator::Resize
-  bool DoResize(void* ptr, size_t new_size) override;
-
-  /// @copydoc Allocator::GetUsableLayout
-  StatusWithSize DoGetCapacity() const override {
-    return StatusWithSize(capacity_);
-  }
-
-  /// @copydoc Allocator::GetUsableLayout
-  Result<Layout> DoGetUsableLayout(const void* ptr) const override;
-
-  /// @copydoc Allocator::GetAllocatedLayout
-  Result<Layout> DoGetAllocatedLayout(const void* ptr) const override;
-
-  /// @copydoc Allocator::Query
-  Status DoQuery(const void* ptr) const override;
-
-  /// Selects a free block to allocate from.
-  ///
-  /// This method represents the allocator-specific strategy of choosing which
-  /// block should be used to satisfy allocation requests.
-  ///
-  /// @param  layout  Same as ``Allocator::Allocate``.
-  virtual BlockType* ChooseBlock(Layout layout) = 0;
-
-  /// Returns the block associated with a pointer.
-  ///
-  /// If the given pointer is to this allocator's memory region, but not to a
-  /// valid block, the memory is corrupted and this method will crash to assist
-  /// in uncovering the underlying bug.
-  ///
-  /// @param  ptr           Pointer to an allocated block's usable space.
-  ///
-  /// @returns @rst
-  ///
-  /// .. pw-status-codes::
-  ///
-  ///    OK: Result contains a pointer to the block.
-  ///
-  ///    OUT_OF_RANGE: Given pointer is outside the allocator's memory.
-  ///
-  /// @endrst
-  template <typename PtrType,
-            typename BlockPtrType = std::conditional_t<
-                std::is_const_v<std::remove_pointer_t<PtrType>>,
-                const BlockType*,
-                BlockType*>>
-  Result<BlockPtrType> FromUsableSpace(PtrType ptr) const;
-
-  /// Ensures the pointer to the last block is correct after the given block is
-  /// allocated or freed.
-  void UpdateLast(BlockType* block);
-
-  // Represents the range of blocks for this allocator.
-  size_t capacity_ = 0;
-  BlockType* first_ = nullptr;
-  BlockType* last_ = nullptr;
-  uint16_t unpoisoned_ = 0;
-};
-
-}  // namespace internal
-
-/// Block allocator that uses a "first-fit" allocation strategy.
-///
-/// In this strategy, the allocator handles an allocation request by starting at
-/// the beginning of the range of blocks and looking for the first one which can
-/// satisfy the request.
-///
-/// This strategy may result in slightly worse fragmentation than the
-/// corresponding "last-fit" strategy, since the alignment may result in unused
-/// fragments both before and after an allocated block.
-template <typename OffsetType = uintptr_t,
-          size_t kPoisonInterval = 0,
-          size_t kAlign = alignof(OffsetType)>
-class FirstFitBlockAllocator
-    : public internal::BlockAllocator<OffsetType, kPoisonInterval, kAlign> {
- public:
-  using Base = internal::BlockAllocator<OffsetType, kPoisonInterval, kAlign>;
-  using BlockType = typename Base::BlockType;
-
-  constexpr FirstFitBlockAllocator() : Base() {}
-  explicit FirstFitBlockAllocator(ByteSpan region) : Base(region) {}
-
- private:
-  /// @copydoc Allocator::Allocate
-  BlockType* ChooseBlock(Layout layout) override {
-    // Search forwards for the first block that can hold this allocation.
-    for (auto* block : Base::blocks()) {
-      if (BlockType::AllocFirst(block, layout.size(), layout.alignment())
-              .ok()) {
-        return block;
-      }
-    }
-    return nullptr;
-  }
-};
-
-/// Block allocator that uses a "last-fit" allocation strategy.
-///
-/// In this strategy, the allocator handles an allocation request by starting at
-/// the end of the range of blocks and looking for the last one which can
-/// satisfy the request.
-///
-/// This strategy may result in slightly better fragmentation than the
-/// corresponding "first-fit" strategy, since even with alignment it will result
-/// in at most one unused fragment before the allocated block.
-template <typename OffsetType = uintptr_t,
-          size_t kPoisonInterval = 0,
-          size_t kAlign = alignof(OffsetType)>
-class LastFitBlockAllocator
-    : public internal::BlockAllocator<OffsetType, kPoisonInterval, kAlign> {
- public:
-  using Base = internal::BlockAllocator<OffsetType, kPoisonInterval, kAlign>;
-  using BlockType = typename Base::BlockType;
-
-  constexpr LastFitBlockAllocator() : Base() {}
-  explicit LastFitBlockAllocator(ByteSpan region) : Base(region) {}
-
- private:
-  /// @copydoc Allocator::Allocate
-  BlockType* ChooseBlock(Layout layout) override {
-    // Search backwards for the last block that can hold this allocation.
-    for (auto* block : Base::rblocks()) {
-      if (BlockType::AllocLast(block, layout.size(), layout.alignment()).ok()) {
-        return block;
-      }
-    }
-    return nullptr;
-  }
-};
-
-/// Block allocator that uses a "best-fit" allocation strategy.
-///
-/// In this strategy, the allocator handles an allocation request by looking at
-/// all unused blocks and finding the smallest one which can satisfy the
-/// request.
-///
-/// This algorithm may make better use of available memory by wasting less on
-/// unused fragments, but may also lead to worse fragmentation as those
-/// fragments are more likely to be too small to be useful to other requests.
-template <typename OffsetType = uintptr_t,
-          size_t kPoisonInterval = 0,
-          size_t kAlign = alignof(OffsetType)>
-class BestFitBlockAllocator
-    : public internal::BlockAllocator<OffsetType, kPoisonInterval, kAlign> {
- public:
-  using Base = internal::BlockAllocator<OffsetType, kPoisonInterval, kAlign>;
-  using BlockType = typename Base::BlockType;
-
-  constexpr BestFitBlockAllocator() : Base() {}
-  explicit BestFitBlockAllocator(ByteSpan region) : Base(region) {}
-
- private:
-  /// @copydoc Allocator::Allocate
-  BlockType* ChooseBlock(Layout layout) override {
-    // Search backwards for the smallest block that can hold this allocation.
-    BlockType* best = nullptr;
-    for (auto* block : Base::rblocks()) {
-      if (!block->CanAllocLast(layout.size(), layout.alignment()).ok()) {
-        continue;
-      }
-      if (best == nullptr || block->OuterSize() < best->OuterSize()) {
-        best = block;
-      }
-    }
-    if (best != nullptr &&
-        BlockType::AllocLast(best, layout.size(), layout.alignment()).ok()) {
-      return best;
-    }
-    return nullptr;
-  }
-};
-
-/// Block allocator that uses a "worst-fit" allocation strategy.
-///
-/// In this strategy, the allocator handles an allocation request by looking at
-/// all unused blocks and finding the biggest one which can satisfy the
-/// request.
-///
-/// This algorithm may lead to less fragmentation as any unused fragments are
-/// more likely to be large enough to be useful to other requests.
-template <typename OffsetType = uintptr_t,
-          size_t kPoisonInterval = 0,
-          size_t kAlign = alignof(OffsetType)>
-class WorstFitBlockAllocator
-    : public internal::BlockAllocator<OffsetType, kPoisonInterval, kAlign> {
- public:
-  using Base = internal::BlockAllocator<OffsetType, kPoisonInterval, kAlign>;
-  using BlockType = typename Base::BlockType;
-
-  constexpr WorstFitBlockAllocator() : Base() {}
-  explicit WorstFitBlockAllocator(ByteSpan region) : Base(region) {}
-
- private:
-  /// @copydoc Allocator::Allocate
-  BlockType* ChooseBlock(Layout layout) override {
-    // Search backwards for the biggest block that can hold this allocation.
-    BlockType* worst = nullptr;
-    for (auto* block : Base::rblocks()) {
-      if (!block->CanAllocLast(layout.size(), layout.alignment()).ok()) {
-        continue;
-      }
-      if (worst == nullptr || block->OuterSize() > worst->OuterSize()) {
-        worst = block;
-      }
-    }
-    if (worst != nullptr &&
-        BlockType::AllocLast(worst, layout.size(), layout.alignment()).ok()) {
-      return worst;
-    }
-    return nullptr;
-  }
-};
-
-/// Block allocator that uses a "dual first-fit" allocation strategy split
-/// between large and small allocations.
-///
-/// In this strategy, the strategy includes a threshold value. Requests for
-/// more than this threshold are handled similarly to `FirstFit`, while requests
-/// for less than this threshold are handled similarly to `LastFit`.
-///
-/// This algorithm approaches the performance of `FirstFit` and `LastFit` while
-/// improving on those algorithms fragmentation.
-template <typename OffsetType = uintptr_t,
-          size_t kPoisonInterval = 0,
-          size_t kAlign = alignof(OffsetType)>
-class DualFirstFitBlockAllocator
-    : public internal::BlockAllocator<OffsetType, kPoisonInterval, kAlign> {
- public:
-  using Base = internal::BlockAllocator<OffsetType, kPoisonInterval, kAlign>;
-  using BlockType = typename Base::BlockType;
-
-  constexpr DualFirstFitBlockAllocator() : Base() {}
-  DualFirstFitBlockAllocator(ByteSpan region, size_t threshold)
-      : Base(region), threshold_(threshold) {}
-
-  /// Sets the threshold value for which requests are considered "large".
-  void set_threshold(size_t threshold) { threshold_ = threshold; }
-
- private:
-  /// @copydoc Allocator::Allocate
-  BlockType* ChooseBlock(Layout layout) override {
-    if (layout.size() < threshold_) {
-      // Search backwards for the last block that can hold this allocation.
-      for (auto* block : Base::rblocks()) {
-        if (BlockType::AllocLast(block, layout.size(), layout.alignment())
-                .ok()) {
-          return block;
-        }
-      }
-    } else {
-      // Search forwards for the first block that can hold this allocation.
-      for (auto* block : Base::blocks()) {
-        if (BlockType::AllocFirst(block, layout.size(), layout.alignment())
-                .ok()) {
-          return block;
-        }
-      }
-    }
-    // No valid block found.
-    return nullptr;
-  }
-
-  size_t threshold_ = 0;
-};
-
-// Template method implementations
-
-namespace internal {
-
-template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
-typename BlockAllocator<OffsetType, kPoisonInterval, kAlign>::Range
-BlockAllocator<OffsetType, kPoisonInterval, kAlign>::blocks() const {
-  return Range(first_);
-}
-
-template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
-typename BlockAllocator<OffsetType, kPoisonInterval, kAlign>::ReverseRange
-BlockAllocator<OffsetType, kPoisonInterval, kAlign>::rblocks() {
-  while (last_ != nullptr && !last_->Last()) {
-    last_ = last_->Next();
-  }
-  return ReverseRange(last_);
-}
-
-template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
-Status BlockAllocator<OffsetType, kPoisonInterval, kAlign>::Init(
-    ByteSpan region) {
-  Result<BlockType*> result = BlockType::Init(region);
-  if (!result.ok()) {
-    return result.status();
-  }
-  return Init(*result);
-}
-
-template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
-Status BlockAllocator<OffsetType, kPoisonInterval, kAlign>::Init(
-    BlockType* begin, BlockType* end) {
-  if (begin == nullptr) {
-    return Status::InvalidArgument();
-  }
-  if (end == nullptr) {
-    end = begin;
-    while (!end->Last()) {
-      end = end->Next();
-    }
-  } else if (begin < end) {
-    end->MarkLast();
-  } else {
-    return Status::InvalidArgument();
-  }
-  first_ = begin;
-  last_ = end;
-  for (const auto& block : blocks()) {
-    capacity_ += block->OuterSize();
-  }
-  return OkStatus();
-}
-
-template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
-void BlockAllocator<OffsetType, kPoisonInterval, kAlign>::Reset() {
-  for (auto* block : blocks()) {
-    if (block->Used()) {
-      CrashOnAllocated(block);
-    }
-  }
-}
-
-template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
-void* BlockAllocator<OffsetType, kPoisonInterval, kAlign>::DoAllocate(
-    Layout layout) {
-  BlockType* block = ChooseBlock(layout);
-  if (block == nullptr) {
-    return nullptr;
-  }
-  UpdateLast(block);
-  return block->UsableSpace();
-}
-
-template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
-void BlockAllocator<OffsetType, kPoisonInterval, kAlign>::DoDeallocate(
-    void* ptr) {
-  auto result = FromUsableSpace(ptr);
-  if (!result.ok()) {
-    return;
-  }
-  BlockType* block = *result;
-
-  // Free the block and merge it with its neighbors, if possible.
-  BlockType::Free(block);
-  UpdateLast(block);
-
-  if constexpr (kPoisonInterval != 0) {
-    ++unpoisoned_;
-    if (unpoisoned_ >= kPoisonInterval) {
-      block->Poison();
-      unpoisoned_ = 0;
-    }
-  }
-}
-
-template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
-bool BlockAllocator<OffsetType, kPoisonInterval, kAlign>::DoResize(
-    void* ptr, size_t new_size) {
-  auto result = FromUsableSpace(ptr);
-  if (!result.ok()) {
-    return false;
-  }
-  BlockType* block = *result;
-
-  if (!BlockType::Resize(block, new_size).ok()) {
-    return false;
-  }
-  UpdateLast(block);
-  return true;
-}
-
-template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
-Result<Layout>
-BlockAllocator<OffsetType, kPoisonInterval, kAlign>::DoGetUsableLayout(
-    const void* ptr) const {
-  auto result = FromUsableSpace(ptr);
-  if (!result.ok()) {
-    return Status::NotFound();
-  }
-  const BlockType* block = result.value();
-  if (!block->Used()) {
-    return Status::FailedPrecondition();
-  }
-  return Layout(block->InnerSize(), block->Alignment());
-}
-
-template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
-Result<Layout>
-BlockAllocator<OffsetType, kPoisonInterval, kAlign>::DoGetAllocatedLayout(
-    const void* ptr) const {
-  auto result = FromUsableSpace(ptr);
-  if (!result.ok()) {
-    return Status::NotFound();
-  }
-  const BlockType* block = result.value();
-  if (!block->Used()) {
-    return Status::FailedPrecondition();
-  }
-  return Layout(block->OuterSize(), block->Alignment());
-}
-
-template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
-Status BlockAllocator<OffsetType, kPoisonInterval, kAlign>::DoQuery(
-    const void* ptr) const {
-  return FromUsableSpace(ptr).status();
-}
-
-template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
-template <typename PtrType, typename BlockPtrType>
-Result<BlockPtrType>
-BlockAllocator<OffsetType, kPoisonInterval, kAlign>::FromUsableSpace(
-    PtrType ptr) const {
-  if (ptr < first_->UsableSpace() || last_->UsableSpace() < ptr) {
-    return Status::OutOfRange();
-  }
-  BlockPtrType block = BlockType::FromUsableSpace(ptr);
-  block->CrashIfInvalid();
-  return block;
-}
-
-template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
-void BlockAllocator<OffsetType, kPoisonInterval, kAlign>::UpdateLast(
-    BlockType* block) {
-  if (block->Last()) {
-    last_ = block;
-  } else if (block->Next()->Last()) {
-    last_ = block->Next();
-  }
-}
-
-}  // namespace internal
-}  // namespace pw::allocator
+// TODO(b/326509341): Remove when all customers import the correct files.
+#include "pw_allocator/best_fit_block_allocator.h"
+#include "pw_allocator/dual_first_fit_block_allocator.h"
+#include "pw_allocator/first_fit_block_allocator.h"
+#include "pw_allocator/last_fit_block_allocator.h"
+#include "pw_allocator/worst_fit_block_allocator.h"
diff --git a/pw_allocator/public/pw_allocator/block_allocator_base.h b/pw_allocator/public/pw_allocator/block_allocator_base.h
new file mode 100644
index 0000000..a0b58ae
--- /dev/null
+++ b/pw_allocator/public/pw_allocator/block_allocator_base.h
@@ -0,0 +1,386 @@
+// Copyright 2024 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+#pragma once
+
+#include <cstddef>
+#include <cstdint>
+#include <type_traits>
+
+#include "pw_allocator/allocator.h"
+#include "pw_allocator/block.h"
+#include "pw_allocator/capability.h"
+#include "pw_bytes/span.h"
+#include "pw_result/result.h"
+#include "pw_status/status.h"
+
+namespace pw::allocator {
+namespace internal {
+
+/// Block-independent base class of ``BlockAllocator``.
+///
+/// This class contains static methods which do not depend on the template
+/// parameters of ``BlockAllocator`` that are used to determine the block type.
+/// This allows the methods to be defined in a separate source file and use
+/// macros that cannot be used in headers, e.g. PW_CHECK.
+///
+/// This class should not be used directly. Instead, use ``BlockAllocator`` or
+/// one of its specializations.
+class GenericBlockAllocator : public Allocator {
+ public:
+  static constexpr Capabilities kCapabilities = kImplementsGetUsableLayout |
+                                                kImplementsGetAllocatedLayout |
+                                                kImplementsQuery;
+
+  // Not copyable or movable.
+  GenericBlockAllocator(const GenericBlockAllocator&) = delete;
+  GenericBlockAllocator& operator=(const GenericBlockAllocator&) = delete;
+  GenericBlockAllocator(GenericBlockAllocator&&) = delete;
+  GenericBlockAllocator& operator=(GenericBlockAllocator&&) = delete;
+
+ protected:
+  constexpr GenericBlockAllocator() : Allocator(kCapabilities) {}
+
+  /// Crashes with an informational message that the given block is allocated.
+  ///
+  /// This method is meant to be called by ``BlockAllocator``'s destructor.
+  /// There must not be any outstanding allocations from an allocator when it
+  /// is destroyed.
+  static void CrashOnAllocated(void* allocated);
+};
+
+}  // namespace internal
+
+/// A memory allocator that uses a list of blocks.
+///
+/// This class does not implement `ChooseBlock` and cannot be used directly.
+/// Instead, use one of its specializations.
+///
+/// NOTE!! Do NOT use memory returned from this allocator as the backing for
+/// another allocator. If this is done, the `Query` method may incorrectly
+/// think pointers returned by that allocator were created by this one, and
+/// report that this allocator can de/reallocate them.
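+///
+/// A minimal sketch of explicit initialization, using the concrete
+/// `FirstFitBlockAllocator` specialization and assuming `region` is a
+/// caller-provided `ByteSpan`:
+///
+/// @code{.cpp}
+///   FirstFitBlockAllocator<uintptr_t> allocator;
+///   PW_CHECK_OK(allocator.Init(region));
+/// @endcode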
+template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
+class BlockAllocator : public internal::GenericBlockAllocator {
+ public:
+  using BlockType = Block<OffsetType, kAlign, kPoisonInterval != 0>;
+  using Range = typename BlockType::Range;
+
+  /// Constexpr constructor. Callers must explicitly call `Init`.
+  constexpr BlockAllocator() : internal::GenericBlockAllocator() {}
+
+  /// Non-constexpr constructor that automatically calls `Init`.
+  ///
+  /// Errors are fatal.
+  ///
+  /// @param[in]  region              The memory region for this allocator.
+  explicit BlockAllocator(ByteSpan region) : BlockAllocator() {
+    PW_ASSERT(Init(region).ok());
+  }
+
+  ~BlockAllocator() override { Reset(); }
+
+  /// Returns a ``Range`` of blocks tracking the memory of this allocator.
+  Range blocks() const;
+
+  /// Sets the memory region to be used by this allocator.
+  ///
+  /// This method will instantiate an initial block using the memory region.
+  ///
+  /// @param[in]  region              The memory region for this allocator.
+  ///
+  /// @returns @rst
+  ///
+  /// .. pw-status-codes::
+  ///
+  ///    OK: The allocator is initialized.
+  ///
+  ///    INVALID_ARGUMENT: The memory region is null.
+  ///
+  ///    RESOURCE_EXHAUSTED: The region is too small for ``BlockType``.
+  ///
+  ///    OUT_OF_RANGE: The region is too large for ``BlockType``.
+  ///
+  /// @endrst
+  Status Init(ByteSpan region);
+
+  /// Sets the blocks to be used by this allocator.
+  ///
+  /// This method will use the sequence of blocks as-is, which must be valid.
+  /// `end` is not provided, the sequence extends to a block marked "last".
+  ///
+  /// @param[in]  begin               The first block for this allocator.
+  /// @param[in]  end                 Optional. The last block for this
+  ///                                 allocator.
+  ///
+  /// @returns @rst
+  ///
+  /// .. pw-status-codes::
+  ///
+  ///    OK: The allocator is initialized.
+  ///
+  ///    INVALID_ARGUMENT: The block sequence is empty.
+  ///
+  /// @endrst
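+  ///
+  /// Example (a sketch; assumes `begin` and `end` already delimit a valid
+  /// sequence of blocks, e.g. created via ``BlockType::Init``):
+  /// @code{.cpp}
+  ///   PW_ASSERT(allocator.Init(begin, end).ok());
+  /// @endcode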
+  Status Init(BlockType* begin, BlockType* end = nullptr);
+
+  /// Resets the allocator to an uninitialized state.
+  ///
+  /// At the time of the call, there MUST NOT be any outstanding allocated
+  /// blocks from this allocator.
+  void Reset();
+
+ protected:
+  using ReverseRange = typename BlockType::ReverseRange;
+
+  /// Returns a ``ReverseRange`` of blocks tracking the memory of this
+  /// allocator.
+  ReverseRange rblocks();
+
+ private:
+  /// @copydoc Allocator::Allocate
+  void* DoAllocate(Layout layout) override;
+
+  /// @copydoc Allocator::Deallocate
+  void DoDeallocate(void* ptr) override;
+
+  /// @copydoc Allocator::Deallocate
+  void DoDeallocate(void* ptr, Layout) override { DoDeallocate(ptr); }
+
+  /// @copydoc Allocator::Resize
+  bool DoResize(void* ptr, size_t new_size) override;
+
+  /// @copydoc Allocator::GetCapacity
+  StatusWithSize DoGetCapacity() const override {
+    return StatusWithSize(capacity_);
+  }
+
+  /// @copydoc Allocator::GetUsableLayout
+  Result<Layout> DoGetUsableLayout(const void* ptr) const override;
+
+  /// @copydoc Allocator::GetAllocatedLayout
+  Result<Layout> DoGetAllocatedLayout(const void* ptr) const override;
+
+  /// @copydoc Allocator::Query
+  Status DoQuery(const void* ptr) const override;
+
+  /// Selects a free block to allocate from.
+  ///
+  /// This method represents the allocator-specific strategy of choosing which
+  /// block should be used to satisfy allocation requests.
+  ///
+  /// @param  layout  Same as ``Allocator::Allocate``.
+  virtual BlockType* ChooseBlock(Layout layout) = 0;
+
+  /// Returns the block associated with a pointer.
+  ///
+  /// If the given pointer is to this allocator's memory region, but not to a
+  /// valid block, the memory is corrupted and this method will crash to assist
+  /// in uncovering the underlying bug.
+  ///
+  /// @param  ptr           Pointer to an allocated block's usable space.
+  ///
+  /// @returns @rst
+  ///
+  /// .. pw-status-codes::
+  ///
+  ///    OK: Result contains a pointer to the block.
+  ///
+  ///    OUT_OF_RANGE: Given pointer is outside the allocator's memory.
+  ///
+  /// @endrst
+  template <typename PtrType,
+            typename BlockPtrType = std::conditional_t<
+                std::is_const_v<std::remove_pointer_t<PtrType>>,
+                const BlockType*,
+                BlockType*>>
+  Result<BlockPtrType> FromUsableSpace(PtrType ptr) const;
+
+  /// Ensures the pointer to the last block is correct after the given block is
+  /// allocated or freed.
+  void UpdateLast(BlockType* block);
+
+  // Represents the range of blocks for this allocator.
+  size_t capacity_ = 0;
+  BlockType* first_ = nullptr;
+  BlockType* last_ = nullptr;
+  uint16_t unpoisoned_ = 0;
+};
+
+// Template method implementations
+
+template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
+typename BlockAllocator<OffsetType, kPoisonInterval, kAlign>::Range
+BlockAllocator<OffsetType, kPoisonInterval, kAlign>::blocks() const {
+  return Range(first_);
+}
+
+template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
+typename BlockAllocator<OffsetType, kPoisonInterval, kAlign>::ReverseRange
+BlockAllocator<OffsetType, kPoisonInterval, kAlign>::rblocks() {
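+  // `last_` may be stale after blocks are split or merged; advance it to the
+  // block currently marked "last" before iterating in reverse.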
+  while (last_ != nullptr && !last_->Last()) {
+    last_ = last_->Next();
+  }
+  return ReverseRange(last_);
+}
+
+template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
+Status BlockAllocator<OffsetType, kPoisonInterval, kAlign>::Init(
+    ByteSpan region) {
+  Result<BlockType*> result = BlockType::Init(region);
+  if (!result.ok()) {
+    return result.status();
+  }
+  return Init(*result);
+}
+
+template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
+Status BlockAllocator<OffsetType, kPoisonInterval, kAlign>::Init(
+    BlockType* begin, BlockType* end) {
+  if (begin == nullptr) {
+    return Status::InvalidArgument();
+  }
+  if (end == nullptr) {
+    end = begin;
+    while (!end->Last()) {
+      end = end->Next();
+    }
+  } else if (begin < end) {
+    end->MarkLast();
+  } else {
+    return Status::InvalidArgument();
+  }
+  first_ = begin;
+  last_ = end;
+  for (const auto& block : blocks()) {
+    capacity_ += block->OuterSize();
+  }
+  return OkStatus();
+}
+
+template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
+void BlockAllocator<OffsetType, kPoisonInterval, kAlign>::Reset() {
+  for (auto* block : blocks()) {
+    if (block->Used()) {
+      CrashOnAllocated(block);
+    }
+  }
+}
+
+template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
+void* BlockAllocator<OffsetType, kPoisonInterval, kAlign>::DoAllocate(
+    Layout layout) {
+  BlockType* block = ChooseBlock(layout);
+  if (block == nullptr) {
+    return nullptr;
+  }
+  UpdateLast(block);
+  return block->UsableSpace();
+}
+
+template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
+void BlockAllocator<OffsetType, kPoisonInterval, kAlign>::DoDeallocate(
+    void* ptr) {
+  auto result = FromUsableSpace(ptr);
+  if (!result.ok()) {
+    return;
+  }
+  BlockType* block = *result;
+
+  // Free the block and merge it with its neighbors, if possible.
+  BlockType::Free(block);
+  UpdateLast(block);
+
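+  // When poisoning is enabled, poison every `kPoisonInterval`th freed block
+  // so that stray writes to freed memory can be detected later.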
+  if constexpr (kPoisonInterval != 0) {
+    ++unpoisoned_;
+    if (unpoisoned_ >= kPoisonInterval) {
+      block->Poison();
+      unpoisoned_ = 0;
+    }
+  }
+}
+
+template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
+bool BlockAllocator<OffsetType, kPoisonInterval, kAlign>::DoResize(
+    void* ptr, size_t new_size) {
+  auto result = FromUsableSpace(ptr);
+  if (!result.ok()) {
+    return false;
+  }
+  BlockType* block = *result;
+
+  if (!BlockType::Resize(block, new_size).ok()) {
+    return false;
+  }
+  UpdateLast(block);
+  return true;
+}
+
+template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
+Result<Layout>
+BlockAllocator<OffsetType, kPoisonInterval, kAlign>::DoGetUsableLayout(
+    const void* ptr) const {
+  auto result = FromUsableSpace(ptr);
+  if (!result.ok()) {
+    return Status::NotFound();
+  }
+  const BlockType* block = result.value();
+  if (!block->Used()) {
+    return Status::FailedPrecondition();
+  }
+  return Layout(block->InnerSize(), block->Alignment());
+}
+
+template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
+Result<Layout>
+BlockAllocator<OffsetType, kPoisonInterval, kAlign>::DoGetAllocatedLayout(
+    const void* ptr) const {
+  auto result = FromUsableSpace(ptr);
+  if (!result.ok()) {
+    return Status::NotFound();
+  }
+  const BlockType* block = result.value();
+  if (!block->Used()) {
+    return Status::FailedPrecondition();
+  }
+  return Layout(block->OuterSize(), block->Alignment());
+}
+
+template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
+Status BlockAllocator<OffsetType, kPoisonInterval, kAlign>::DoQuery(
+    const void* ptr) const {
+  return FromUsableSpace(ptr).status();
+}
+
+template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
+template <typename PtrType, typename BlockPtrType>
+Result<BlockPtrType>
+BlockAllocator<OffsetType, kPoisonInterval, kAlign>::FromUsableSpace(
+    PtrType ptr) const {
+  if (ptr < first_->UsableSpace() || last_->UsableSpace() < ptr) {
+    return Status::OutOfRange();
+  }
+  BlockPtrType block = BlockType::FromUsableSpace(ptr);
+  block->CrashIfInvalid();
+  return block;
+}
+
+template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
+void BlockAllocator<OffsetType, kPoisonInterval, kAlign>::UpdateLast(
+    BlockType* block) {
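+  // Allocating or freeing may split or merge the final block; keep the
+  // cached `last_` pointer pointing at the block marked "last".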
+  if (block->Last()) {
+    last_ = block;
+  } else if (block->Next()->Last()) {
+    last_ = block->Next();
+  }
+}
+
+}  // namespace pw::allocator
diff --git a/pw_allocator/public/pw_allocator/block_allocator_testing.h b/pw_allocator/public/pw_allocator/block_allocator_testing.h
new file mode 100644
index 0000000..44b554b
--- /dev/null
+++ b/pw_allocator/public/pw_allocator/block_allocator_testing.h
@@ -0,0 +1,173 @@
+// Copyright 2024 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+#pragma once
+
+#include <cstddef>
+#include <cstdint>
+#include <limits>
+
+#include "pw_allocator/block.h"
+#include "pw_allocator/block_allocator.h"
+#include "pw_unit_test/framework.h"
+
+namespace pw::allocator::test {
+
+// Forward declaration.
+struct Preallocation;
+
+/// Test fixture responsible for managing a memory region and an allocator that
+/// allocates from it.
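+///
+/// A minimal sketch of a derived fixture (the concrete allocator type is
+/// illustrative; it must match ``BlockAllocatorType``):
+/// @code{.cpp}
+///   class MyBlockAllocatorTest : public BlockAllocatorTest {
+///    public:
+///     MyBlockAllocatorTest() : BlockAllocatorTest(allocator_) {}
+///
+///    private:
+///     FirstFitBlockAllocator<OffsetType> allocator_;
+///   };
+/// @endcode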
+class BlockAllocatorTest : public ::testing::Test {
+ public:
+  using OffsetType = uint16_t;
+  using BlockType = Block<OffsetType>;
+  using BlockAllocatorType = BlockAllocator<OffsetType, 0, alignof(OffsetType)>;
+
+  static_assert(
+      std::is_same_v<BlockType, typename BlockAllocatorType::BlockType>);
+
+  // Size of the memory region to use in the tests below.
+  static constexpr size_t kCapacity = 1024;
+
+  // The number of allocated pointers cached by the test fixture.
+  static constexpr size_t kNumPtrs = 16;
+
+  // Represents the sizes of various allocations.
+  static constexpr size_t kLargeInnerSize = kCapacity / 8;
+  static constexpr size_t kLargeOuterSize =
+      BlockType::kBlockOverhead + kLargeInnerSize;
+
+  static constexpr size_t kSmallInnerSize = BlockType::kBlockOverhead * 2;
+  static constexpr size_t kSmallOuterSize =
+      BlockType::kBlockOverhead + kSmallInnerSize;
+
+  static constexpr size_t kSmallerOuterSize = kSmallInnerSize;
+  static constexpr size_t kLargerOuterSize =
+      kLargeOuterSize + kSmallerOuterSize;
+
+ protected:
+  BlockAllocatorTest(BlockAllocatorType& allocator);
+
+  // Test fixtures.
+
+  void SetUp() override;
+
+  /// Returns the underlying memory region.
+  ByteSpan GetBytes();
+
+  /// Initializes the allocator with a region of memory and returns it.
+  BlockAllocatorType& GetAllocator();
+
+  /// Initializes the allocator with a sequence of preallocated blocks and
+  /// returns it.
+  ///
+  /// See also ``Preallocation``.
+  BlockAllocatorType& GetAllocator(
+      std::initializer_list<Preallocation> preallocations);
+
+  /// Returns a pointer to the usable space of the block that follows the
+  /// allocation at `index`.
+  void* NextAfter(size_t index);
+
+  /// Stores an allocated pointer in the test's cache of pointers.
+  void Store(size_t index, void* ptr);
+
+  /// Retrieves an allocated pointer from the test's cache of pointers.
+  void* Fetch(size_t index);
+
+  /// Ensures the memory is usable by writing to it.
+  void UseMemory(void* ptr, size_t size);
+
+  void TearDown() override;
+
+  // Unit tests.
+  void CanAutomaticallyInit(BlockAllocatorType& allocator);
+  void CanExplicitlyInit(BlockAllocatorType& allocator);
+  void GetCapacity();
+  void AllocateLarge();
+  void AllocateSmall();
+  void AllocateTooLarge();
+  void AllocateLargeAlignment();
+  void AllocateAlignmentFailure();
+  void DeallocateNull();
+  void DeallocateShuffled();
+  void IterateOverBlocks();
+  void ResizeNull();
+  void ResizeLargeSame();
+  void ResizeLargeSmaller();
+  void ResizeLargeLarger();
+  void ResizeLargeLargerFailure();
+  void ResizeSmallSame();
+  void ResizeSmallSmaller();
+  void ResizeSmallLarger();
+  void ResizeSmallLargerFailure();
+  void CanGetLayoutFromValidPointer();
+  void CannotGetLayoutFromInvalidPointer();
+
+ private:
+  BlockAllocatorType& allocator_;
+  alignas(BlockType::kAlignment) std::array<std::byte, kCapacity> buffer_;
+  std::array<void*, kNumPtrs> ptrs_;
+};
+
+/// Represents an initial state for a memory block.
+///
+/// Unit tests can specify an initial block layout by passing a list of these
+/// structs to `BlockAllocatorTest::GetAllocator`.
+///
+/// The outer size of each block must be at least `kBlockOverhead` for the
+/// block type in use. The special `kSizeRemaining` may be used for at most
+/// one block to give it any space not assigned to other blocks.
+///
+/// The index must be less than `BlockAllocatorTest::kNumPtrs` or one of the
+/// special values `kIndexFree` or `kIndexNext`. A regular index will mark the
+/// block as "used" and cache the pointer to its usable space in
+/// `ptrs_[index]`. The special value `kIndexFree` will leave the block as
+/// "free". The special value `kIndexNext` will mark the block as "used" and
+/// cache its pointer in the next available slot of the test fixture. This may
+/// be used when the pointer is not needed for the test but should still be
+/// automatically freed at the end of the test.
+///
+/// Example:
+/// @code{.cpp}
+///   // For a block type with kBlockOverhead == 8:
+///   auto& allocator = GetAllocator({
+///     {32,              0},           // ptrs_[0] == 24-byte region.
+///     {24,              kIndexFree},  // Free block of 16 bytes.
+///     {48,              2},           // ptrs_[2] == 40-byte region.
+///     {kSizeRemaining,  kIndexFree},  // Free block of leftover space.
+///     {64,              4},           // ptrs_[4] == 56-byte region at the
+///                                     //            end of the allocator.
+///   });
+/// @endcode
+struct Preallocation {
+  /// The outer size of the block to preallocate.
+  size_t outer_size;
+
+  /// Index into the test fixture's `ptrs_` array where the pointer to the
+  /// block's usable space should be cached.
+
+  /// Special value indicating the block should comprise all remaining space
+  /// not preallocated to any other block. May be used at most once.
+  static constexpr size_t kSizeRemaining = std::numeric_limits<size_t>::max();
+
+  /// Special value indicating the block should be treated as unallocated,
+  /// i.e., its pointer should not be cached.
+  static constexpr size_t kIndexFree = BlockAllocatorTest::kNumPtrs + 1;
+
+  /// Special value indicating to use the next available index.
+  static constexpr size_t kIndexNext = BlockAllocatorTest::kNumPtrs + 2;
+};
+
+}  // namespace pw::allocator::test
diff --git a/pw_allocator/public/pw_allocator/dual_first_fit_block_allocator.h b/pw_allocator/public/pw_allocator/dual_first_fit_block_allocator.h
new file mode 100644
index 0000000..06fa8d3
--- /dev/null
+++ b/pw_allocator/public/pw_allocator/dual_first_fit_block_allocator.h
@@ -0,0 +1,72 @@
+// Copyright 2024 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+#pragma once
+
+#include "pw_allocator/block_allocator_base.h"
+
+namespace pw::allocator {
+
+/// Block allocator that uses a "dual first-fit" allocation strategy split
+/// between large and small allocations.
+///
+/// In this strategy, the allocator includes a threshold value. Requests of
+/// at least this size are handled similarly to `FirstFit`, while smaller
+/// requests are handled similarly to `LastFit`.
+///
+/// This algorithm approaches the performance of `FirstFit` and `LastFit`
+/// while improving on those algorithms' fragmentation.
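+///
+/// Example (a sketch; the buffer size and threshold are arbitrary):
+/// @code{.cpp}
+///   std::array<std::byte, 0x1000> buffer;
+///   // Requests of at least 64 bytes are first-fit from the front of the
+///   // region; smaller requests are last-fit from the back.
+///   DualFirstFitBlockAllocator<uint16_t> allocator(buffer, 64);
+///   void* large = allocator.Allocate(Layout(256, 1));
+///   void* small = allocator.Allocate(Layout(8, 1));
+/// @endcode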
+template <typename OffsetType = uintptr_t,
+          size_t kPoisonInterval = 0,
+          size_t kAlign = alignof(OffsetType)>
+class DualFirstFitBlockAllocator
+    : public BlockAllocator<OffsetType, kPoisonInterval, kAlign> {
+ public:
+  using Base = BlockAllocator<OffsetType, kPoisonInterval, kAlign>;
+  using BlockType = typename Base::BlockType;
+
+  constexpr DualFirstFitBlockAllocator() : Base() {}
+  DualFirstFitBlockAllocator(ByteSpan region, size_t threshold)
+      : Base(region), threshold_(threshold) {}
+
+  /// Sets the threshold value for which requests are considered "large".
+  void set_threshold(size_t threshold) { threshold_ = threshold; }
+
+ private:
+  /// @copydoc BlockAllocator::ChooseBlock
+  BlockType* ChooseBlock(Layout layout) override {
+    if (layout.size() < threshold_) {
+      // Search backwards for the last block that can hold this allocation.
+      for (auto* block : Base::rblocks()) {
+        if (BlockType::AllocLast(block, layout.size(), layout.alignment())
+                .ok()) {
+          return block;
+        }
+      }
+    } else {
+      // Search forwards for the first block that can hold this allocation.
+      for (auto* block : Base::blocks()) {
+        if (BlockType::AllocFirst(block, layout.size(), layout.alignment())
+                .ok()) {
+          return block;
+        }
+      }
+    }
+    // No valid block found.
+    return nullptr;
+  }
+
+  size_t threshold_ = 0;
+};
+
+}  // namespace pw::allocator
diff --git a/pw_allocator/public/pw_allocator/first_fit_block_allocator.h b/pw_allocator/public/pw_allocator/first_fit_block_allocator.h
new file mode 100644
index 0000000..cc7743c
--- /dev/null
+++ b/pw_allocator/public/pw_allocator/first_fit_block_allocator.h
@@ -0,0 +1,55 @@
+// Copyright 2024 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+#pragma once
+
+#include "pw_allocator/block_allocator_base.h"
+
+namespace pw::allocator {
+
+/// Block allocator that uses a "first-fit" allocation strategy.
+///
+/// In this strategy, the allocator handles an allocation request by starting at
+/// the beginning of the range of blocks and looking for the first one which can
+/// satisfy the request.
+///
+/// This strategy may result in slightly worse fragmentation than the
+/// corresponding "last-fit" strategy, since the alignment may result in unused
+/// fragments both before and after an allocated block.
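+///
+/// Example (a sketch; the buffer size is arbitrary):
+/// @code{.cpp}
+///   std::array<std::byte, 0x1000> buffer;
+///   FirstFitBlockAllocator<uint16_t> allocator(buffer);
+///   // Satisfied by the first (lowest-addressed) free block that fits.
+///   void* ptr = allocator.Allocate(Layout(64, alignof(uint32_t)));
+/// @endcode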
+template <typename OffsetType = uintptr_t,
+          size_t kPoisonInterval = 0,
+          size_t kAlign = alignof(OffsetType)>
+class FirstFitBlockAllocator
+    : public BlockAllocator<OffsetType, kPoisonInterval, kAlign> {
+ public:
+  using Base = BlockAllocator<OffsetType, kPoisonInterval, kAlign>;
+  using BlockType = typename Base::BlockType;
+
+  constexpr FirstFitBlockAllocator() : Base() {}
+  explicit FirstFitBlockAllocator(ByteSpan region) : Base(region) {}
+
+ private:
+  /// @copydoc BlockAllocator::ChooseBlock
+  BlockType* ChooseBlock(Layout layout) override {
+    // Search forwards for the first block that can hold this allocation.
+    for (auto* block : Base::blocks()) {
+      if (BlockType::AllocFirst(block, layout.size(), layout.alignment())
+              .ok()) {
+        return block;
+      }
+    }
+    return nullptr;
+  }
+};
+
+}  // namespace pw::allocator
diff --git a/pw_allocator/public/pw_allocator/last_fit_block_allocator.h b/pw_allocator/public/pw_allocator/last_fit_block_allocator.h
new file mode 100644
index 0000000..aead1f2
--- /dev/null
+++ b/pw_allocator/public/pw_allocator/last_fit_block_allocator.h
@@ -0,0 +1,54 @@
+// Copyright 2024 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+#pragma once
+
+#include "pw_allocator/block_allocator_base.h"
+
+namespace pw::allocator {
+
+/// Block allocator that uses a "last-fit" allocation strategy.
+///
+/// In this strategy, the allocator handles an allocation request by starting at
+/// the end of the range of blocks and looking for the last one which can
+/// satisfy the request.
+///
+/// This strategy may result in slightly better fragmentation than the
+/// corresponding "first-fit" strategy, since even with alignment it will result
+/// in at most one unused fragment before the allocated block.
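+///
+/// Example (a sketch; the buffer size is arbitrary):
+/// @code{.cpp}
+///   std::array<std::byte, 0x1000> buffer;
+///   LastFitBlockAllocator<uint16_t> allocator(buffer);
+///   // Satisfied by the last (highest-addressed) free block that fits.
+///   void* ptr = allocator.Allocate(Layout(64, alignof(uint32_t)));
+/// @endcode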
+template <typename OffsetType = uintptr_t,
+          size_t kPoisonInterval = 0,
+          size_t kAlign = alignof(OffsetType)>
+class LastFitBlockAllocator
+    : public BlockAllocator<OffsetType, kPoisonInterval, kAlign> {
+ public:
+  using Base = BlockAllocator<OffsetType, kPoisonInterval, kAlign>;
+  using BlockType = typename Base::BlockType;
+
+  constexpr LastFitBlockAllocator() : Base() {}
+  explicit LastFitBlockAllocator(ByteSpan region) : Base(region) {}
+
+ private:
+  /// @copydoc BlockAllocator::ChooseBlock
+  BlockType* ChooseBlock(Layout layout) override {
+    // Search backwards for the last block that can hold this allocation.
+    for (auto* block : Base::rblocks()) {
+      if (BlockType::AllocLast(block, layout.size(), layout.alignment()).ok()) {
+        return block;
+      }
+    }
+    return nullptr;
+  }
+};
+
+}  // namespace pw::allocator
diff --git a/pw_allocator/public/pw_allocator/testing.h b/pw_allocator/public/pw_allocator/testing.h
index 2c98d62..50dfb95 100644
--- a/pw_allocator/public/pw_allocator/testing.h
+++ b/pw_allocator/public/pw_allocator/testing.h
@@ -17,8 +17,8 @@
 
 #include "pw_allocator/allocator.h"
 #include "pw_allocator/block.h"
-#include "pw_allocator/block_allocator.h"
 #include "pw_allocator/buffer.h"
+#include "pw_allocator/first_fit_block_allocator.h"
 #include "pw_allocator/metrics.h"
 #include "pw_allocator/tracking_allocator.h"
 #include "pw_bytes/span.h"
diff --git a/pw_allocator/public/pw_allocator/worst_fit_block_allocator.h b/pw_allocator/public/pw_allocator/worst_fit_block_allocator.h
new file mode 100644
index 0000000..bd48d7a
--- /dev/null
+++ b/pw_allocator/public/pw_allocator/worst_fit_block_allocator.h
@@ -0,0 +1,61 @@
+// Copyright 2024 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+#pragma once
+
+#include "pw_allocator/block_allocator_base.h"
+
+namespace pw::allocator {
+
+/// Block allocator that uses a "worst-fit" allocation strategy.
+///
+/// In this strategy, the allocator handles an allocation request by looking at
+/// all unused blocks and finding the biggest one which can satisfy the
+/// request.
+///
+/// This algorithm may lead to less fragmentation as any unused fragments are
+/// more likely to be large enough to be useful to other requests.
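+///
+/// Example (a sketch; the buffer size is arbitrary):
+/// @code{.cpp}
+///   std::array<std::byte, 0x1000> buffer;
+///   WorstFitBlockAllocator<uint16_t> allocator(buffer);
+///   // Satisfied by the largest free block that can fit the request.
+///   void* ptr = allocator.Allocate(Layout(64, 1));
+/// @endcode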
+template <typename OffsetType = uintptr_t,
+          size_t kPoisonInterval = 0,
+          size_t kAlign = alignof(OffsetType)>
+class WorstFitBlockAllocator
+    : public BlockAllocator<OffsetType, kPoisonInterval, kAlign> {
+ public:
+  using Base = BlockAllocator<OffsetType, kPoisonInterval, kAlign>;
+  using BlockType = typename Base::BlockType;
+
+  constexpr WorstFitBlockAllocator() : Base() {}
+  explicit WorstFitBlockAllocator(ByteSpan region) : Base(region) {}
+
+ private:
+  /// @copydoc BlockAllocator::ChooseBlock
+  BlockType* ChooseBlock(Layout layout) override {
+    // Search backwards for the biggest block that can hold this allocation.
+    BlockType* worst = nullptr;
+    for (auto* block : Base::rblocks()) {
+      if (!block->CanAllocLast(layout.size(), layout.alignment()).ok()) {
+        continue;
+      }
+      if (worst == nullptr || block->OuterSize() > worst->OuterSize()) {
+        worst = block;
+      }
+    }
+    if (worst != nullptr &&
+        BlockType::AllocLast(worst, layout.size(), layout.alignment()).ok()) {
+      return worst;
+    }
+    return nullptr;
+  }
+};
+
+}  // namespace pw::allocator
diff --git a/pw_allocator/tracking_allocator_test.cc b/pw_allocator/tracking_allocator_test.cc
index 9e6e3b7..97ef683 100644
--- a/pw_allocator/tracking_allocator_test.cc
+++ b/pw_allocator/tracking_allocator_test.cc
@@ -16,7 +16,6 @@
 #include <cstdint>
 
 #include "pw_allocator/allocator.h"
-#include "pw_allocator/block_allocator.h"
 #include "pw_allocator/metrics.h"
 #include "pw_allocator/testing.h"
 #include "pw_log/log.h"
diff --git a/pw_allocator/worst_fit_block_allocator_test.cc b/pw_allocator/worst_fit_block_allocator_test.cc
new file mode 100644
index 0000000..87f111c
--- /dev/null
+++ b/pw_allocator/worst_fit_block_allocator_test.cc
@@ -0,0 +1,120 @@
+// Copyright 2024 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+#include "pw_allocator/worst_fit_block_allocator.h"
+
+#include "pw_allocator/block_allocator_testing.h"
+#include "pw_unit_test/framework.h"
+
+namespace pw::allocator {
+namespace {
+
+// Test fixtures.
+
+using BlockAllocatorTest = test::BlockAllocatorTest;
+using Preallocation = test::Preallocation;
+using WorstFitBlockAllocatorType =
+    WorstFitBlockAllocator<BlockAllocatorTest::OffsetType>;
+
+class WorstFitBlockAllocatorTest : public BlockAllocatorTest {
+ public:
+  WorstFitBlockAllocatorTest() : BlockAllocatorTest(allocator_) {}
+
+ private:
+  WorstFitBlockAllocatorType allocator_;
+};
+
+// Unit tests.
+
+TEST_F(WorstFitBlockAllocatorTest, CanAutomaticallyInit) {
+  WorstFitBlockAllocatorType allocator(GetBytes());
+  CanAutomaticallyInit(allocator);
+}
+
+TEST_F(WorstFitBlockAllocatorTest, CanExplicitlyInit) {
+  WorstFitBlockAllocatorType allocator;
+  CanExplicitlyInit(allocator);
+}
+
+TEST_F(WorstFitBlockAllocatorTest, GetCapacity) { GetCapacity(); }
+
+TEST_F(WorstFitBlockAllocatorTest, AllocateLarge) { AllocateLarge(); }
+
+TEST_F(WorstFitBlockAllocatorTest, AllocateSmall) { AllocateSmall(); }
+
+TEST_F(WorstFitBlockAllocatorTest, AllocateLargeAlignment) {
+  AllocateLargeAlignment();
+}
+
+TEST_F(WorstFitBlockAllocatorTest, AllocateAlignmentFailure) {
+  AllocateAlignmentFailure();
+}
+
+TEST_F(WorstFitBlockAllocatorTest, AllocatesWorstCompatible) {
+  auto& allocator = GetAllocator({
+      {kLargeOuterSize, Preallocation::kIndexFree},
+      {kSmallerOuterSize, 1},
+      {kSmallOuterSize, Preallocation::kIndexFree},
+      {kSmallerOuterSize, 3},
+      {kSmallerOuterSize, Preallocation::kIndexFree},
+      {kSmallerOuterSize, 5},
+      {kLargerOuterSize, Preallocation::kIndexFree},
+      {Preallocation::kSizeRemaining, 7},
+  });
+
+  Store(6, allocator.Allocate(Layout(kLargeInnerSize, 1)));
+  EXPECT_EQ(NextAfter(5), Fetch(6));
+  EXPECT_EQ(NextAfter(6), Fetch(7));
+  Store(0, allocator.Allocate(Layout(kSmallInnerSize, 1)));
+  EXPECT_EQ(NextAfter(0), Fetch(1));
+}
+
+TEST_F(WorstFitBlockAllocatorTest, DeallocateNull) { DeallocateNull(); }
+
+TEST_F(WorstFitBlockAllocatorTest, DeallocateShuffled) { DeallocateShuffled(); }
+
+TEST_F(WorstFitBlockAllocatorTest, IterateOverBlocks) { IterateOverBlocks(); }
+
+TEST_F(WorstFitBlockAllocatorTest, ResizeNull) { ResizeNull(); }
+
+TEST_F(WorstFitBlockAllocatorTest, ResizeLargeSame) { ResizeLargeSame(); }
+
+TEST_F(WorstFitBlockAllocatorTest, ResizeLargeSmaller) { ResizeLargeSmaller(); }
+
+TEST_F(WorstFitBlockAllocatorTest, ResizeLargeLarger) { ResizeLargeLarger(); }
+
+TEST_F(WorstFitBlockAllocatorTest, ResizeLargeLargerFailure) {
+  ResizeLargeLargerFailure();
+}
+
+TEST_F(WorstFitBlockAllocatorTest, ResizeSmallSame) { ResizeSmallSame(); }
+
+TEST_F(WorstFitBlockAllocatorTest, ResizeSmallSmaller) { ResizeSmallSmaller(); }
+
+TEST_F(WorstFitBlockAllocatorTest, ResizeSmallLarger) { ResizeSmallLarger(); }
+
+TEST_F(WorstFitBlockAllocatorTest, ResizeSmallLargerFailure) {
+  ResizeSmallLargerFailure();
+}
+
+TEST_F(WorstFitBlockAllocatorTest, CanGetLayoutFromValidPointer) {
+  CanGetLayoutFromValidPointer();
+}
+
+TEST_F(WorstFitBlockAllocatorTest, CannotGetLayoutFromInvalidPointer) {
+  CannotGetLayoutFromInvalidPointer();
+}
+
+}  // namespace
+}  // namespace pw::allocator