DRAFT: Provide stack trace API with stack allocator
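
Replace the free function `kotlin::GetCurrentStackTrace` with a
`kotlin::StackTrace<Allocator>` class (modeled loosely after the upcoming
`std::basic_stacktrace`) and switch `kotlin::GetStackTraceStrings` to take a
`std_support::span<void* const>`. The allocator parameter makes it possible
to collect a bounded trace into stack memory via the new
`kotlin::StackBuffer` adapter in Utils.hpp.

Illustrative usage sketch (call sites are hypothetical; the API is as added
below):

    // Heap-backed trace with the default allocator.
    auto trace = kotlin::StackTrace<>::current(/*skipFrames=*/0);
    auto strings = kotlin::GetStackTraceStrings(trace.data());

    // Trace of at most 8 frames backed by a buffer on the stack; requesting
    // more frames than the buffer can hold throws std::bad_array_new_length.
    kotlin::StackBuffer<void*, 8> buffer;
    auto bounded = kotlin::StackTrace<decltype(buffer.allocator())>::current(
        /*skipFrames=*/0, /*maxDepth=*/8, buffer.allocator());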
diff --git a/kotlin-native/runtime/src/main/cpp/Natives.cpp b/kotlin-native/runtime/src/main/cpp/Natives.cpp
index 44caaf60e..3d5616d 100644
--- a/kotlin-native/runtime/src/main/cpp/Natives.cpp
+++ b/kotlin-native/runtime/src/main/cpp/Natives.cpp
@@ -42,12 +42,12 @@
 }
 
 NO_INLINE OBJ_GETTER0(Kotlin_getCurrentStackTrace) {
-    KStdVector<void*> stackTrace;
+    kotlin::StackTrace<> stackTrace;
     {
         // Don't use `kotlin::CallWithThreadState` to avoid messing up callstack.
         kotlin::ThreadStateGuard guard(kotlin::ThreadState::kNative);
         // Skip this function and primary `Throwable` constructor.
-        stackTrace = kotlin::GetCurrentStackTrace(2);
+        stackTrace = kotlin::StackTrace<>::current(2);
     }
 
     ObjHolder resultHolder;
@@ -61,7 +61,7 @@
 OBJ_GETTER(Kotlin_getStackTraceStrings, KConstRef stackTrace) {
     const KNativePtr* array = PrimitiveArrayAddressOfElementAt<KNativePtr>(stackTrace->array(), 0);
     size_t size = stackTrace->array()->count_;
-    auto stackTraceStrings = kotlin::CallWithThreadState<kotlin::ThreadState::kNative>(kotlin::GetStackTraceStrings, array, size);
+    auto stackTraceStrings = kotlin::CallWithThreadState<kotlin::ThreadState::kNative>(kotlin::GetStackTraceStrings, kotlin::std_support::span<void* const>(array, size));
     ObjHolder resultHolder;
     ObjHeader* strings = AllocArrayInstance(theArrayTypeInfo, stackTraceStrings.size(), resultHolder.slot());
 
diff --git a/kotlin-native/runtime/src/main/cpp/StackTrace.cpp b/kotlin-native/runtime/src/main/cpp/StackTrace.cpp
index fc18ed8..03a9b5c 100644
--- a/kotlin-native/runtime/src/main/cpp/StackTrace.cpp
+++ b/kotlin-native/runtime/src/main/cpp/StackTrace.cpp
@@ -27,46 +27,6 @@
 
 namespace {
 
-#if USE_GCC_UNWIND
-struct Backtrace {
-    Backtrace(int count, int skip) : skipCount(skip) {
-        uint32_t size = count - skipCount;
-        if (size < 0) {
-            size = 0;
-        }
-        array.reserve(size);
-    }
-
-    void setNextElement(_Unwind_Ptr element) { array.push_back(reinterpret_cast<void*>(element)); }
-
-    int skipCount;
-    KStdVector<void*> array;
-};
-
-_Unwind_Reason_Code depthCountCallback(struct _Unwind_Context* context, void* arg) {
-    int* result = reinterpret_cast<int*>(arg);
-    (*result)++;
-    return _URC_NO_REASON;
-}
-
-_Unwind_Reason_Code unwindCallback(struct _Unwind_Context* context, void* arg) {
-    Backtrace* backtrace = reinterpret_cast<Backtrace*>(arg);
-    if (backtrace->skipCount > 0) {
-        backtrace->skipCount--;
-        return _URC_NO_REASON;
-    }
-
-#if (__MINGW32__ || __MINGW64__)
-    _Unwind_Ptr address = _Unwind_GetRegionStart(context);
-#else
-    _Unwind_Ptr address = _Unwind_GetIP(context);
-#endif
-    backtrace->setNextElement(address);
-
-    return _URC_NO_REASON;
-}
-#endif
-
 THREAD_LOCAL_VARIABLE bool disallowSourceInfo = false;
 
 #if !KONAN_NO_BACKTRACE
@@ -77,38 +37,13 @@
 
 } // namespace
 
-// TODO: this implementation is just a hack, e.g. the result is inexact;
-// however it is better to have an inexact stacktrace than not to have any.
-NO_INLINE KStdVector<void*> kotlin::GetCurrentStackTrace(int extraSkipFrames) noexcept {
-#if KONAN_NO_BACKTRACE
-    return {};
-#else
-    // Skips this function frame + anything asked by the caller.
-    const int kSkipFrames = 1 + extraSkipFrames;
 #if USE_GCC_UNWIND
-    int depth = 0;
-    _Unwind_Backtrace(depthCountCallback, static_cast<void*>(&depth));
-    Backtrace result(depth, kSkipFrames);
-    if (result.array.capacity() > 0) {
-        _Unwind_Backtrace(unwindCallback, static_cast<void*>(&result));
-    }
-    return std::move(result.array);
-#else
-    const int maxSize = 32;
-    void* buffer[maxSize];
-
-    int size = backtrace(buffer, maxSize);
-    if (size < kSkipFrames) return {};
-
-    KStdVector<void*> result;
-    result.reserve(size - kSkipFrames);
-    for (int index = kSkipFrames; index < size; ++index) {
-        result.push_back(buffer[index]);
-    }
-    return result;
-#endif
-#endif // !KONAN_NO_BACKTRACE
+_Unwind_Reason_Code kotlin::internal::depthCountCallback(struct _Unwind_Context* context, void* arg) {
+    int* result = reinterpret_cast<int*>(arg);
+    (*result)++;
+    return _URC_NO_REASON;
 }
+#endif
 
 #if ! KONAN_NO_BACKTRACE
 #include <cstdarg>
@@ -183,17 +118,18 @@
 KNativePtr adjustAddressForSourceInfo(KNativePtr address) { return address; }
 #endif
 
-KStdVector<KStdString> kotlin::GetStackTraceStrings(void* const* stackTrace, size_t stackTraceSize) noexcept {
+KStdVector<KStdString> kotlin::GetStackTraceStrings(std_support::span<void* const> stackTrace) noexcept {
 #if KONAN_NO_BACKTRACE
     KStdVector<KStdString> strings;
     strings.push_back("<UNIMPLEMENTED>");
     return strings;
 #else
+    size_t size = stackTrace.size();
     KStdVector<KStdString> strings;
-    strings.reserve(stackTraceSize);
-    if (stackTraceSize > 0) {
+    strings.reserve(size);
+    if (size > 0) {
         SourceInfo buffer[10]; // outside of the loop to avoid calling constructors and destructors each time
-        for (size_t index = 0; index < stackTraceSize; ++index) {
+        for (size_t index = 0; index < size; ++index) {
             KNativePtr address = stackTrace[index];
             if (!address || reinterpret_cast<uintptr_t>(address) == 1) continue;
             address = adjustAddressForSourceInfo(address);
@@ -255,8 +191,8 @@
     // Skip this function.
     constexpr int kSkipFrames = 1;
 #endif
-    auto stackTrace = GetCurrentStackTrace(kSkipFrames);
-    auto stackTraceStrings = GetStackTraceStrings(stackTrace.data(), stackTrace.size());
+    auto trace = StackTrace<>::current(kSkipFrames);
+    auto stackTraceStrings = GetStackTraceStrings(trace.data());
     for (auto& frame : stackTraceStrings) {
         konan::consoleErrorUtf8(frame.c_str(), frame.size());
         konan::consoleErrorf("\n");
diff --git a/kotlin-native/runtime/src/main/cpp/StackTrace.hpp b/kotlin-native/runtime/src/main/cpp/StackTrace.hpp
index 9735796..e18f0de 100644
--- a/kotlin-native/runtime/src/main/cpp/StackTrace.hpp
+++ b/kotlin-native/runtime/src/main/cpp/StackTrace.hpp
@@ -3,18 +3,168 @@
  * that can be found in the LICENSE file.
  */
 
+#ifndef RUNTIME_STACK_TRACE_H
+#define RUNTIME_STACK_TRACE_H
+
+#if KONAN_NO_BACKTRACE
+// Nothing to include
+#elif USE_GCC_UNWIND
+// GCC unwinder for backtrace.
+#include <unwind.h>
+#else
+// Glibc backtrace() function.
+#include <execinfo.h>
+#endif
+
+#include <algorithm>
+#include <limits>
+#include <utility>
+#include <vector>
+
+#include "cpp_support/Span.hpp"
 #include "Memory.h"
 #include "Types.h"
 
 namespace kotlin {
+namespace internal {
 
-// TODO: Instead of KStd* provide allocator-customizable versions, to allow stack memory allocation.
+// TODO: Use size_t instead of int for frame counts here and below.
+
+#if USE_GCC_UNWIND
+template <class Allocator>
+struct Backtrace {
+    Backtrace(int count, int skip, int maxFramesToCollect, const Allocator& allocator) :
+        skipCount(skip), remainingFrames(maxFramesToCollect), array(allocator) {
+        int size = std::min(count - skipCount, maxFramesToCollect);
+        if (size < 0) { size = 0; }
+        array.reserve(size);
+    }
+
+    void setNextElement(_Unwind_Ptr element) {
+        array.push_back(reinterpret_cast<void*>(element));
+        --remainingFrames;
+    }
+
+    int skipCount;
+    int remainingFrames;
+    std::vector<void*, Allocator> array;
+};
+
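+// First pass: counts the total number of frames on the current stack.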
+_Unwind_Reason_Code depthCountCallback(struct _Unwind_Context* context, void* arg);
+
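+// Second pass: records frame addresses, honoring the skip count and the depth limit.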
+template <class Allocator>
+_Unwind_Reason_Code unwindCallback(struct _Unwind_Context* context, void* arg) {
+    auto* backtrace = reinterpret_cast<Backtrace<Allocator>*>(arg);
+    if (backtrace->skipCount > 0) {
+        backtrace->skipCount--;
+        return _URC_NO_REASON;
+    }
+
+    if (backtrace->remainingFrames == 0) {
+        // Just skip frames until the end of the stack.
+        return _URC_NO_REASON;
+    }
+
+#if (__MINGW32__ || __MINGW64__)
+    _Unwind_Ptr address = _Unwind_GetRegionStart(context);
+#else
+    _Unwind_Ptr address = _Unwind_GetIP(context);
+#endif
+    backtrace->setNextElement(address);
+
+    return _URC_NO_REASON;
+}
+#endif
+
+// TODO: this implementation is just a hack, e.g. the result is inexact;
+// however it is better to have an inexact stacktrace than not to have any.
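+// Collects at most `maxFramesToCollect` return addresses of the calling thread's stack,
+// skipping `extraSkipFrames` innermost frames. May throw if the allocator throws.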
+template <class Allocator>
+NO_INLINE std::vector<void*, Allocator> GetCurrentStackTrace(size_t extraSkipFrames,
+                                                             size_t maxFramesToCollect,
+                                                             const Allocator& allocator) {
+#if KONAN_NO_BACKTRACE
+    return {};
+#else
+    // Skips this function frame + anything asked by the caller.
+    const int kSkipFrames = 1 + extraSkipFrames;
+#if USE_GCC_UNWIND
+    int depth = 0;
+    _Unwind_Backtrace(depthCountCallback, static_cast<void*>(&depth));
+    // Clamp to the int range used by the unwinder helpers (see the TODO above).
+    int maxFrames = static_cast<int>(std::min<size_t>(maxFramesToCollect, std::numeric_limits<int>::max()));
+    Backtrace<Allocator> result(depth, kSkipFrames, maxFrames, allocator);
+    if (result.array.capacity() > 0) {
+        _Unwind_Backtrace(unwindCallback<Allocator>, static_cast<void*>(&result));
+    }
+    return std::move(result.array);
+#else
+    const int maxSize = 32;
+    void* buffer[maxSize];
+
+    std::vector<void*, Allocator> result(allocator);
+    int depth = backtrace(buffer, maxSize);
+    if (depth < kSkipFrames) return result;
+
+    size_t size = std::min(static_cast<size_t>(depth - kSkipFrames), maxFramesToCollect);
+    result.reserve(size);
+    for (size_t index = kSkipFrames; index < size + kSkipFrames; ++index) {
+        result.push_back(buffer[index]);
+    }
+    return result;
+#endif
+#endif // !KONAN_NO_BACKTRACE
+}
+
+} // namespace internal
+
 // TODO: Model API as in upcoming https://en.cppreference.com/w/cpp/utility/basic_stacktrace
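+// A snapshot of the current call stack, stored as a list of return addresses.
+// The `Allocator` parameter customizes storage, enabling stack-allocated traces
+// (see `StackBuffer` in Utils.hpp).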
+template <typename Allocator = KonanAllocator<void*>>
+class StackTrace final : private MoveOnly {
+public:
+    StackTrace() noexcept : buffer_(Allocator{}) {}
+    StackTrace(StackTrace<Allocator>&& other) noexcept : buffer_(std::move(other.buffer_)) {}
 
-KStdVector<void*> GetCurrentStackTrace(int extraSkipFrames) noexcept;
+    StackTrace& operator=(StackTrace<Allocator>&& other) noexcept {
+        buffer_ = std::move(other.buffer_);
+        return *this;
+    }
 
-// TODO: This is asking for a span.
-KStdVector<KStdString> GetStackTraceStrings(void* const* stackTrace, size_t stackTraceSize) noexcept;
+    size_t size() const noexcept {
+        return buffer_.size();
+    }
+
+    void*& operator[](size_t index) {
+        return buffer_[index];
+    }
+
+    std_support::span<void*> data() noexcept {
+        return std_support::span<void*>(buffer_.data(), buffer_.size());
+    }
+
+    // TODO: This can throw on allocation failure. Is that acceptable for all callers?
+    NO_INLINE static StackTrace current(size_t skipFrames = 0, const Allocator& allocator = Allocator()) {
+        return StackTrace(internal::GetCurrentStackTrace(skipFrames + 1, std::numeric_limits<size_t>::max(), allocator));
+    }
+
+    NO_INLINE static StackTrace current(size_t skipFrames, size_t maxDepth, const Allocator& allocator = Allocator()) {
+        return StackTrace(internal::GetCurrentStackTrace(skipFrames + 1, maxDepth, allocator));
+    }
+
+private:
+    explicit StackTrace(std::vector<void*, Allocator>&& data) noexcept : buffer_(std::move(data)) {}
+
+    std::vector<void*, Allocator> buffer_;
+};
+
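+// Symbolizes the given frame addresses into human-readable strings, one per frame.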
+KStdVector<KStdString> GetStackTraceStrings(std_support::span<void* const> stackTrace) noexcept;
 
 // It's not always safe to extract SourceInfo during unhandled exception termination.
 void DisallowSourceInfo();
@@ -22,3 +172,5 @@
 void PrintStackTraceStderr();
 
 } // namespace kotlin
+
+#endif // RUNTIME_STACK_TRACE_H
diff --git a/kotlin-native/runtime/src/main/cpp/StackTraceTest.cpp b/kotlin-native/runtime/src/main/cpp/StackTraceTest.cpp
index 69ad1ec..53aa86e 100644
--- a/kotlin-native/runtime/src/main/cpp/StackTraceTest.cpp
+++ b/kotlin-native/runtime/src/main/cpp/StackTraceTest.cpp
@@ -14,16 +14,29 @@
 #include "Porting.h"
 #include "TestSupport.hpp"
 
 using namespace kotlin;
 
 namespace {
 
-NO_INLINE KStdVector<void*> GetStackTrace1(int skipFrames) {
-    return GetCurrentStackTrace(skipFrames);
+template <typename Allocator = KonanAllocator<void*>>
+NO_INLINE StackTrace<Allocator> GetStackTrace1(int skipFrames,
+                                               int maxDepth = std::numeric_limits<int>::max(),
+                                               const Allocator& allocator = Allocator()) {
+    return StackTrace<Allocator>::current(skipFrames, maxDepth, allocator);
 }
 
-NO_INLINE KStdVector<void*> GetStackTrace2(int skipFrames) {
-    return GetStackTrace1(skipFrames);
+template <typename Allocator = KonanAllocator<void*>>
+NO_INLINE StackTrace<Allocator> GetStackTrace2(int skipFrames,
+                                               int maxDepth = std::numeric_limits<int>::max(),
+                                               const Allocator& allocator = Allocator()) {
+    return GetStackTrace1(skipFrames, maxDepth, allocator);
+}
+
+template <typename Allocator = KonanAllocator<void*>>
+NO_INLINE StackTrace<Allocator> GetStackTrace3(int skipFrames,
+                                               int maxDepth = std::numeric_limits<int>::max(),
+                                               const Allocator& allocator = Allocator()) {
+    return GetStackTrace2(skipFrames, maxDepth, allocator);
 }
 
 NO_INLINE void AbortWithStackTrace(int) {
@@ -41,7 +54,7 @@
     constexpr int kSkip = 0;
 #endif
     auto stackTrace = GetStackTrace2(kSkip);
-    auto symbolicStackTrace = GetStackTraceStrings(stackTrace.data(), stackTrace.size());
+    auto symbolicStackTrace = GetStackTraceStrings(stackTrace.data());
     ASSERT_GT(symbolicStackTrace.size(), 0ul);
     EXPECT_THAT(symbolicStackTrace[0], testing::HasSubstr("GetStackTrace1"));
 }
@@ -54,11 +67,93 @@
     constexpr int kSkip = 1;
 #endif
     auto stackTrace = GetStackTrace2(kSkip);
-    auto symbolicStackTrace = GetStackTraceStrings(stackTrace.data(), stackTrace.size());
+    auto symbolicStackTrace = GetStackTraceStrings(stackTrace.data());
     ASSERT_GT(symbolicStackTrace.size(), 0ul);
     EXPECT_THAT(symbolicStackTrace[0], testing::HasSubstr("GetStackTrace2"));
 }
 
+TEST(StackTraceTest, StackTraceWithMaxDepth) {
+    // TODO: Consider incorporating extra skipping to `GetCurrentStackTrace` on windows.
+#if KONAN_WINDOWS
+    constexpr int kSkip = 1;
+#else
+    constexpr int kSkip = 0;
+#endif
+    auto stackTrace = GetStackTrace3(kSkip, 2);
+    auto symbolicStackTrace = GetStackTraceStrings(stackTrace.data());
+    ASSERT_EQ(symbolicStackTrace.size(), 2ul);
+    EXPECT_THAT(symbolicStackTrace[0], testing::HasSubstr("GetStackTrace1"));
+    EXPECT_THAT(symbolicStackTrace[1], testing::HasSubstr("GetStackTrace2"));
+}
+
+TEST(StackTraceTest, StackTraceWithSkipAndMaxDepth) {
+    // TODO: Consider incorporating extra skipping to `GetCurrentStackTrace` on windows.
+#if KONAN_WINDOWS
+    constexpr int kSkip = 2;
+#else
+    constexpr int kSkip = 1;
+#endif
+    auto stackTrace = GetStackTrace3(kSkip, 2);
+    auto symbolicStackTrace = GetStackTraceStrings(stackTrace.data());
+    ASSERT_EQ(symbolicStackTrace.size(), 2ul);
+    EXPECT_THAT(symbolicStackTrace[0], testing::HasSubstr("GetStackTrace2"));
+    EXPECT_THAT(symbolicStackTrace[1], testing::HasSubstr("GetStackTrace3"));
+}
+
+TEST(StackTraceTest, StackAllocatedTrace) {
+    // TODO: Consider incorporating extra skipping to `GetCurrentStackTrace` on windows.
+#if KONAN_WINDOWS
+    constexpr int kSkip = 1;
+#else
+    constexpr int kSkip = 0;
+#endif
+
+    StackBuffer<void*, 1> buffer;
+    auto stackTrace = GetStackTrace2(kSkip, 1, buffer.allocator());
+    auto symbolicStackTrace = GetStackTraceStrings(stackTrace.data());
+    ASSERT_EQ(symbolicStackTrace.size(), 1ul);
+    EXPECT_THAT(symbolicStackTrace[0], testing::HasSubstr("GetStackTrace1"));
+}
+
+TEST(StackTraceTest, StackAllocatedTraceWithSkip) {
+    // TODO: Consider incorporating extra skipping to `GetCurrentStackTrace` on windows.
+#if KONAN_WINDOWS
+    constexpr int kSkip = 2;
+#else
+    constexpr int kSkip = 1;
+#endif
+
+    StackBuffer<void*, 1> buffer;
+    auto stackTrace = GetStackTrace2(kSkip, 1, buffer.allocator());
+    auto symbolicStackTrace = GetStackTraceStrings(stackTrace.data());
+    ASSERT_EQ(symbolicStackTrace.size(), 1ul);
+    EXPECT_THAT(symbolicStackTrace[0], testing::HasSubstr("GetStackTrace2"));
+}
+
+TEST(StackTraceTest, FailedStackAllocatedTrace) {
+    // TODO: Consider incorporating extra skipping to `GetCurrentStackTrace` on windows.
+#if KONAN_WINDOWS
+    constexpr int kSkip = 1;
+#else
+    constexpr int kSkip = 0;
+#endif
+
+    StackBuffer<void*, 1> buffer;
+    EXPECT_THROW(GetStackTrace2(kSkip, 2, buffer.allocator()), std::bad_array_new_length);
+}
+
+TEST(StackTraceTest, FailedStackAllocatedTraceWithSkip) {
+    // TODO: Consider incorporating extra skipping to `GetCurrentStackTrace` on windows.
+#if KONAN_WINDOWS
+    constexpr int kSkip = 2;
+#else
+    constexpr int kSkip = 1;
+#endif
+
+    StackBuffer<void*, 1> buffer;
+    EXPECT_THROW(GetStackTrace2(kSkip, 2, buffer.allocator()), std::bad_array_new_length);
+}
+
 TEST(StackTraceDeathTest, PrintStackTrace) {
     EXPECT_DEATH(
             { AbortWithStackTrace(0); },
diff --git a/kotlin-native/runtime/src/main/cpp/Utils.hpp b/kotlin-native/runtime/src/main/cpp/Utils.hpp
index 9880130..030ae65 100644
--- a/kotlin-native/runtime/src/main/cpp/Utils.hpp
+++ b/kotlin-native/runtime/src/main/cpp/Utils.hpp
@@ -6,8 +6,12 @@
 #ifndef RUNTIME_UTILS_H
 #define RUNTIME_UTILS_H
 
+#include <array>
+#include <new>
 #include <type_traits>
 
+#include "KAssert.h"
+
 namespace kotlin {
 
 // A helper for implementing classes with disabled copy constructor and copy assignment.
@@ -76,6 +80,89 @@
     T2 oldValue_;
 };
 
+// An adapter that allows allocating STL containers in a preallocated buffer. Useful for stack allocations.
+// TODO: Maybe use an arena allocator instead?
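+//
+// Usage sketch:
+//
+//   StackBuffer<int, 16> buffer;
+//   std::vector<int, StackBuffer<int, 16>::StackAllocator> values(buffer.allocator());
+//   values.reserve(16); // The single allocation is served from the stack-resident buffer.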
+template <typename T, size_t Capacity>
+class StackBuffer final : private Pinned {
+    static_assert(std::is_trivially_default_constructible<T>::value);
+
+public:
+    class StackAllocator {
+    public:
+        using value_type = T;
+
+        // `std::allocator_traits` requires a `rebind` member for allocators that are not
+        // class templates (needed by e.g. libstdc++ containers); only rebinding to the
+        // same type is supported.
+        template <typename U>
+        struct rebind {
+            static_assert(std::is_same<U, T>::value, "StackAllocator cannot be rebound to a different type");
+            using other = StackAllocator;
+        };
+
+        StackAllocator(const StackAllocator& other) = default;
+        StackAllocator(StackAllocator&& other) noexcept = default;
+
+        StackAllocator& operator=(const StackAllocator& other) = default;
+        StackAllocator& operator=(StackAllocator&& other) noexcept = default;
+
+        bool operator==(const StackAllocator& other) const noexcept { return buffer_ == other.buffer_; }
+        bool operator!=(const StackAllocator& other) const noexcept { return !(*this == other); }
+
+        size_t max_size() noexcept { return buffer_->allocated_ ? 0 : Capacity; }
+
+        T* allocate(size_t size) {
+            auto result = buffer_->allocate(size);
+            if (!result) {
+                throw std::bad_array_new_length();
+            }
+            return result;
+        }
+
+        void deallocate(T* ptr, size_t count) noexcept {
+            buffer_->deallocate(ptr, count);
+        }
+
+    private:
+        explicit StackAllocator(StackBuffer<T, Capacity>* buffer) noexcept : buffer_(buffer) {}
+
+        StackBuffer<T, Capacity>* buffer_;
+
+        friend class StackBuffer;
+    };
+
+    StackBuffer() noexcept : allocated_(false) {}
+
+    StackAllocator allocator() {
+        return StackAllocator(this);
+    }
+
+private:
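+    // At most one allocation may be live at a time; returns nullptr when the buffer is
+    // already in use or the request exceeds Capacity.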
+    T* allocate(size_t size) noexcept {
+        if (allocated_ || size > Capacity) { return nullptr; }
+        allocated_ = true;
+        return buffer_.data();
+    }
+
+    void deallocate(T* ptr, size_t count) noexcept {
+        RuntimeAssert(allocated_ && ptr == buffer_.data(),
+                      "Expected ptr to be allocated in this StackBuffer. ptr: %p, buffer: %p, allocated: %s",
+                      static_cast<void*>(ptr), static_cast<void*>(buffer_.data()), allocated_ ? "true" : "false");
+        allocated_ = false;
+    }
+
+    std::array<T, Capacity> buffer_;
+    bool allocated_;
+};
+
 } // namespace kotlin
 
 #endif // RUNTIME_UTILS_H
diff --git a/kotlin-native/runtime/src/main/cpp/UtilsTest.cpp b/kotlin-native/runtime/src/main/cpp/UtilsTest.cpp
index 178da99..94aa5c1 100644
--- a/kotlin-native/runtime/src/main/cpp/UtilsTest.cpp
+++ b/kotlin-native/runtime/src/main/cpp/UtilsTest.cpp
@@ -48,3 +48,27 @@
     static_assert(!std::is_move_assignable_v<PinnedImpl>, "Must not be move assignable");
     static_assert(sizeof(PinnedImpl) == sizeof(A), "Must not increase size");
 }
+
+TEST(UtilsTest, BufferAllocatorSmoke) {
+    StackBuffer<int, 2> buffer;
+    auto allocator = buffer.allocator();
+
+    int* array = allocator.allocate(1);
+    EXPECT_NE(array, nullptr);
+    allocator.deallocate(array, 1);
+
+    EXPECT_THROW(allocator.allocate(100), std::bad_array_new_length);
+}
+
+TEST(UtilsTest, BufferAllocatorSeveralAllocations) {
+    StackBuffer<int, 2> buffer;
+    auto allocator = buffer.allocator();
+
+    int* array = allocator.allocate(1);
+    EXPECT_THROW(allocator.allocate(1), std::bad_array_new_length);
+
+    // Expect a successful allocation after a dealloc.
+    allocator.deallocate(array, 1);
+    array = allocator.allocate(1);
+    EXPECT_NE(array, nullptr);
+}