Rework RepeatedPtrFieldBase internal representation: replace the tagged-pointer
SSO scheme (`tagged_rep_or_elem_` with a low-bit tag) with explicit state — a
union overlaying the inline-element form (`arena_`, `element_`) and the
heap-array form (`allocated_size_`, `total_size_`, `elements_`), discriminated
by `using_sbo_`/`has_arena_` flags, and with the owning arena stored in the Rep
header so `GetOwningArena()` works in both forms.

PiperOrigin-RevId: 559560115
diff --git a/src/google/protobuf/repeated_ptr_field.cc b/src/google/protobuf/repeated_ptr_field.cc
index 463561f..757cab2 100644
--- a/src/google/protobuf/repeated_ptr_field.cc
+++ b/src/google/protobuf/repeated_ptr_field.cc
@@ -36,6 +36,7 @@
 #include <cstddef>
 #include <cstdint>
 #include <cstring>
+#include <limits>
 
 #include "absl/log/absl_check.h"
 #include "google/protobuf/arena.h"
@@ -53,61 +54,58 @@
 
 void** RepeatedPtrFieldBase::InternalExtend(int extend_amount) {
   int new_size = current_size_ + extend_amount;
-  if (total_size_ >= new_size) {
+  if (Capacity() >= new_size) {
     // N.B.: rep_ is non-nullptr because extend_amount is always > 0, hence
     // total_size must be non-zero since it is lower-bounded by new_size.
     return elements() + current_size_;
   }
 
-  Arena* arena = GetOwningArena();
-  new_size = internal::CalculateReserveSize<void*, kRepHeaderSize>(total_size_,
+  new_size = internal::CalculateReserveSize<void*, kRepHeaderSize>(Capacity(),
                                                                    new_size);
   ABSL_CHECK_LE(static_cast<int64_t>(new_size),
                 static_cast<int64_t>(
                     (std::numeric_limits<size_t>::max() - kRepHeaderSize) /
-                    sizeof(rep()->elements[0])))
+                    sizeof(element_)))
       << "Requested size is too large to fit into size_t.";
-  size_t bytes = kRepHeaderSize + sizeof(rep()->elements[0]) * new_size;
+  size_t bytes = kRepHeaderSize + sizeof(element_) * new_size;
+  Arena* arena;
   Rep* new_rep;
-  void* old_tagged_ptr = tagged_rep_or_elem_;
-  if (arena == nullptr) {
-    internal::SizedPtr res = internal::AllocateAtLeast(bytes);
-    new_size =
-        static_cast<int>((res.n - kRepHeaderSize) / sizeof(rep()->elements[0]));
-    new_rep = reinterpret_cast<Rep*>(res.p);
-  } else {
+  if (has_arena_) {
+    arena = GetOwningArena();
     new_rep = reinterpret_cast<Rep*>(Arena::CreateArray<char>(arena, bytes));
+  } else {
+    arena = nullptr;
+    internal::SizedPtr res = internal::AllocateAtLeast(bytes);
+    new_size = static_cast<int>((res.n - kRepHeaderSize) / sizeof(element_));
+    new_rep = reinterpret_cast<Rep*>(res.p);
   }
 
-  if (using_sso()) {
-    new_rep->elements[0] = old_tagged_ptr;
-    new_rep->allocated_size = old_tagged_ptr != nullptr ? 1 : 0;
+  new_rep->arena = arena;
+  int new_allocated_size;
+  if (using_sbo_) {
+    new_rep->elements()[0] = element_;
+    new_allocated_size = element_ != nullptr ? 1 : 0;
   } else {
-    if (old_tagged_ptr) {
-      Rep* old_rep = reinterpret_cast<Rep*>(
-          reinterpret_cast<uintptr_t>(old_tagged_ptr) - 1);
-      if (old_rep->allocated_size > 0) {
-        memcpy(new_rep->elements, old_rep->elements,
-               old_rep->allocated_size * sizeof(rep()->elements[0]));
-      }
-      new_rep->allocated_size = old_rep->allocated_size;
+    new_allocated_size = allocated_size_;
+    Rep* old_rep = rep();
+    if (allocated_size_ > 0) {
+      memcpy(new_rep->elements(), elements_,
+             allocated_size_ * sizeof(element_));
+    }
 
-      const size_t old_size =
-          total_size_ * sizeof(rep()->elements[0]) + kRepHeaderSize;
-      if (arena == nullptr) {
-        internal::SizedDelete(old_rep, old_size);
-      } else {
-        arena_->ReturnArrayMemory(old_rep, old_size);
-      }
+    const size_t old_size = total_size_ * sizeof(element_) + kRepHeaderSize;
+    if (arena == nullptr) {
+      internal::SizedDelete(old_rep, old_size);
     } else {
-      new_rep->allocated_size = 0;
+      arena->ReturnArrayMemory(old_rep, old_size);
     }
   }
 
-  tagged_rep_or_elem_ =
-      reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(new_rep) + 1);
+  using_sbo_ = false;
+  allocated_size_ = new_allocated_size;
   total_size_ = new_size;
-  return &new_rep->elements[current_size_];
+  elements_ = new_rep->elements();
+  return elements_ + current_size_;
 }
 
 void RepeatedPtrFieldBase::Reserve(int new_size) {
@@ -117,53 +115,48 @@
 }
 
 void RepeatedPtrFieldBase::DestroyProtos() {
-  ABSL_DCHECK(tagged_rep_or_elem_);
-  ABSL_DCHECK(arena_ == nullptr);
-  if (using_sso()) {
-    delete static_cast<MessageLite*>(tagged_rep_or_elem_);
+  ABSL_DCHECK((using_sbo_ && element_ != nullptr) ||
+              (!using_sbo_ && elements_ != nullptr));
+  ABSL_DCHECK(GetOwningArena() == nullptr);
+  if (using_sbo_) {
+    delete static_cast<MessageLite*>(element_);
 
   } else {
     Rep* r = rep();
-    int n = r->allocated_size;
-    void* const* elements = r->elements;
-    for (int i = 0; i < n; i++) {
-      delete static_cast<MessageLite*>(elements[i]);
+    for (int i = 0; i < allocated_size_; i++) {
+      delete static_cast<MessageLite*>(elements_[i]);
     }
-    const size_t size = total_size_ * sizeof(elements[0]) + kRepHeaderSize;
+    const size_t size = total_size_ * sizeof(element_) + kRepHeaderSize;
     internal::SizedDelete(r, size);
-    tagged_rep_or_elem_ = nullptr;
+    elements_ = nullptr;
   }
 }
 
 void* RepeatedPtrFieldBase::AddOutOfLineHelper(void* obj) {
-  if (tagged_rep_or_elem_ == nullptr) {
-    ABSL_DCHECK_EQ(current_size_, 0);
-    ABSL_DCHECK(using_sso());
+  if (using_sbo_ && current_size_ == 0) {
     ABSL_DCHECK_EQ(allocated_size(), 0);
     ExchangeCurrentSize(1);
-    tagged_rep_or_elem_ = obj;
+    element_ = obj;
     return obj;
   }
-  if (using_sso() || rep()->allocated_size == total_size_) {
+  if (using_sbo_ || (allocated_size_ == total_size_)) {
     InternalExtend(1);  // Equivalent to "Reserve(total_size_ + 1)"
   }
-  Rep* r = rep();
-  ++r->allocated_size;
-  r->elements[ExchangeCurrentSize(current_size_ + 1)] = obj;
+  ++allocated_size_;
+  elements_[ExchangeCurrentSize(current_size_ + 1)] = obj;
   return obj;
 }
 
 void RepeatedPtrFieldBase::CloseGap(int start, int num) {
-  if (using_sso()) {
+  if (using_sbo_) {
     if (start == 0 && num == 1) {
-      tagged_rep_or_elem_ = nullptr;
+      element_ = nullptr;
     }
   } else {
     // Close up a gap of "num" elements starting at offset "start".
-    Rep* r = rep();
-    for (int i = start + num; i < r->allocated_size; ++i)
-      r->elements[i - num] = r->elements[i];
-    r->allocated_size -= num;
+    for (int i = start + num; i < allocated_size_; ++i)
+      elements_[i - num] = elements_[i];
+    allocated_size_ -= num;
   }
   ExchangeCurrentSize(current_size_ - num);
 }
@@ -173,19 +166,18 @@
     return reinterpret_cast<MessageLite*>(
         element_at(ExchangeCurrentSize(current_size_ + 1)));
   }
-  if (allocated_size() == total_size_) {
-    Reserve(total_size_ + 1);
+  if (allocated_size() == Capacity()) {
+    Reserve(Capacity() + 1);
   }
-  MessageLite* result = prototype
-                            ? prototype->New(arena_)
-                            : Arena::CreateMessage<ImplicitWeakMessage>(arena_);
-  if (using_sso()) {
+  MessageLite* result =
+      prototype ? prototype->New(GetOwningArena())
+                : Arena::CreateMessage<ImplicitWeakMessage>(GetOwningArena());
+  if (using_sbo_) {
     ExchangeCurrentSize(current_size_ + 1);
-    tagged_rep_or_elem_ = result;
+    element_ = result;
   } else {
-    Rep* r = rep();
-    ++r->allocated_size;
-    r->elements[ExchangeCurrentSize(current_size_ + 1)] = result;
+    ++allocated_size_;
+    elements_[ExchangeCurrentSize(current_size_ + 1)] = result;
   }
   return result;
 }
diff --git a/src/google/protobuf/repeated_ptr_field.h b/src/google/protobuf/repeated_ptr_field.h
index 820494f..e1772a9 100644
--- a/src/google/protobuf/repeated_ptr_field.h
+++ b/src/google/protobuf/repeated_ptr_field.h
@@ -172,16 +172,13 @@
   static constexpr int kSSOCapacity = 1;
 
  protected:
-  constexpr RepeatedPtrFieldBase()
-      : arena_(nullptr),
-        current_size_(0),
-        total_size_(kSSOCapacity),
-        tagged_rep_or_elem_(nullptr) {}
-  explicit RepeatedPtrFieldBase(Arena* arena)
+  constexpr RepeatedPtrFieldBase() : RepeatedPtrFieldBase(nullptr) {}
+  constexpr explicit RepeatedPtrFieldBase(Arena* arena)
       : arena_(arena),
+        element_(nullptr),
         current_size_(0),
-        total_size_(kSSOCapacity),
-        tagged_rep_or_elem_(nullptr) {}
+        using_sbo_(true),
+        has_arena_(arena != nullptr) {}
 
   RepeatedPtrFieldBase(const RepeatedPtrFieldBase&) = delete;
   RepeatedPtrFieldBase& operator=(const RepeatedPtrFieldBase&) = delete;
@@ -190,13 +187,13 @@
 #ifndef NDEBUG
     // Try to trigger segfault / asan failure in non-opt builds. If arena_
     // lifetime has ended before the destructor.
-    if (arena_) (void)arena_->SpaceAllocated();
+    if (has_arena_) (void)GetOwningArena()->SpaceAllocated();
 #endif
   }
 
   bool empty() const { return current_size_ == 0; }
   int size() const { return current_size_; }
-  int Capacity() const { return total_size_; }
+  int Capacity() const { return using_sbo_ ? kSSOCapacity : total_size_; }
 
   template <typename TypeHandler>
   const typename TypeHandler::Type& at(int index) const {
@@ -227,7 +224,7 @@
           element_at(ExchangeCurrentSize(current_size_ + 1)));
     }
     typename TypeHandler::Type* result =
-        TypeHandler::NewFromPrototype(prototype, arena_);
+        TypeHandler::NewFromPrototype(prototype, GetOwningArena());
     return reinterpret_cast<typename TypeHandler::Type*>(
         AddOutOfLineHelper(result));
   }
@@ -241,45 +238,45 @@
           std::move(value);
       return;
     }
-    if (allocated_size() == total_size_) {
-      Reserve(total_size_ + 1);
+    if (allocated_size() == Capacity()) {
+      Reserve(Capacity() + 1);
     }
-    if (!using_sso()) ++rep()->allocated_size;
-    typename TypeHandler::Type* result =
-        TypeHandler::New(arena_, std::move(value));
-    element_at(ExchangeCurrentSize(current_size_ + 1)) = result;
+    if (using_sbo_) {
+      element_ = TypeHandler::New(arena_, std::move(value));
+      ExchangeCurrentSize(current_size_ + 1);
+    } else {
+      ++allocated_size_;
+      elements_[ExchangeCurrentSize(current_size_ + 1)] =
+          TypeHandler::New(rep()->arena, std::move(value));
+    }
   }
 
   template <typename TypeHandler>
   void Delete(int index) {
     ABSL_DCHECK_GE(index, 0);
     ABSL_DCHECK_LT(index, current_size_);
-    TypeHandler::Delete(cast<TypeHandler>(element_at(index)), arena_);
+    TypeHandler::Delete(cast<TypeHandler>(element_at(index)), GetOwningArena());
   }
 
   // Must be called from destructor.
   template <typename TypeHandler>
   void Destroy() {
-    if (arena_ != nullptr) return;
+    if (has_arena_) return;
 
-    if (using_sso()) {
-      if (tagged_rep_or_elem_ == nullptr) return;
-      TypeHandler::Delete(cast<TypeHandler>(tagged_rep_or_elem_), nullptr);
+    if (using_sbo_) {
+      if (element_ != nullptr) {
+        TypeHandler::Delete(cast<TypeHandler>(element_), nullptr);
+      }
       return;
     }
-
-    Rep* r = rep();
-    int n = r->allocated_size;
-    void* const* elems = r->elements;
-    for (int i = 0; i < n; i++) {
-      TypeHandler::Delete(cast<TypeHandler>(elems[i]), nullptr);
+    for (int i = 0; i < allocated_size_; i++) {
+      TypeHandler::Delete(cast<TypeHandler>(elements_[i]), nullptr);
     }
-    internal::SizedDelete(r, total_size_ * sizeof(elems[0]) + kRepHeaderSize);
+    internal::SizedDelete(rep(),
+                          total_size_ * sizeof(element_) + kRepHeaderSize);
   }
 
-  bool NeedsDestroy() const {
-    return tagged_rep_or_elem_ != nullptr && arena_ == nullptr;
-  }
+  bool NeedsDestroy() const { return !has_arena_ && allocated_size() > 0; }
   void DestroyProtos();  // implemented in the cc file
 
  public:
@@ -325,7 +322,6 @@
 
   inline void InternalSwap(RepeatedPtrFieldBase* rhs) {
     ABSL_DCHECK(this != rhs);
-
     // Swap all fields at once.
     internal::memswap<sizeof(RepeatedPtrFieldBase)>(
         reinterpret_cast<char*>(this), reinterpret_cast<char*>(rhs));
@@ -343,12 +339,17 @@
   template <typename TypeHandler>
   void AddAllocatedForParse(typename TypeHandler::Type* value) {
     ABSL_DCHECK_EQ(current_size_, allocated_size());
-    if (current_size_ == total_size_) {
+    if (current_size_ == Capacity()) {
       // The array is completely full with no cleared objects, so grow it.
       InternalExtend(1);
     }
-    element_at(current_size_++) = value;
-    if (!using_sso()) ++rep()->allocated_size;
+    if (using_sbo_) {
+      element_ = value;
+    } else {
+      elements_[current_size_] = value;
+      ++allocated_size_;
+    }
+    ++current_size_;
   }
 
  protected:
@@ -420,7 +421,7 @@
   template <typename TypeHandler>
   size_t SpaceUsedExcludingSelfLong() const {
     size_t allocated_bytes =
-        using_sso()
+        using_sbo_
             ? 0
             : static_cast<size_t>(total_size_) * sizeof(void*) + kRepHeaderSize;
     const int n = allocated_size();
@@ -454,24 +455,24 @@
   template <typename TypeHandler>
   void UnsafeArenaAddAllocated(typename TypeHandler::Type* value) {
     // Make room for the new pointer.
-    if (current_size_ == total_size_) {
+    if (current_size_ == Capacity()) {
       // The array is completely full with no cleared objects, so grow it.
-      Reserve(total_size_ + 1);
-      ++rep()->allocated_size;
-    } else if (allocated_size() == total_size_) {
+      Reserve(Capacity() + 1);
+      ++allocated_size_;
+    } else if (allocated_size() == Capacity()) {
       // There is no more space in the pointer array because it contains some
       // cleared objects awaiting reuse.  We don't want to grow the array in
       // this case because otherwise a loop calling AddAllocated() followed by
       // Clear() would leak memory.
-      TypeHandler::Delete(cast<TypeHandler>(element_at(current_size_)), arena_);
+      TypeHandler::Delete(cast<TypeHandler>(element_at(current_size_)),
+                          GetOwningArena());
     } else if (current_size_ < allocated_size()) {
       // We have some cleared objects.  We don't care about their order, so we
       // can just move the first one to the end to make space.
-      element_at(allocated_size()) = element_at(current_size_);
-      ++rep()->allocated_size;
+      elements_[allocated_size_++] = elements_[current_size_];
     } else {
       // There are no cleared objects.
-      if (!using_sso()) ++rep()->allocated_size;
+      if (!using_sbo_) ++allocated_size_;
     }
 
     element_at(ExchangeCurrentSize(current_size_ + 1)) = value;
@@ -492,14 +493,13 @@
     ExchangeCurrentSize(current_size_ - 1);
     typename TypeHandler::Type* result =
         cast<TypeHandler>(element_at(current_size_));
-    if (using_sso()) {
-      tagged_rep_or_elem_ = nullptr;
+    if (using_sbo_) {
+      element_ = nullptr;
     } else {
-      --rep()->allocated_size;
-      if (current_size_ < allocated_size()) {
+      if (current_size_ < --allocated_size_) {
         // There are cleared elements on the end; replace the removed element
         // with the last allocated element.
-        element_at(current_size_) = element_at(allocated_size());
+        elements_[current_size_] = elements_[allocated_size_];
       }
     }
     return result;
@@ -514,30 +514,26 @@
            "RepeatedPtrField not on an arena.";
     ABSL_DCHECK(TypeHandler::GetOwningArena(value) == nullptr)
         << "AddCleared() can only accept values not on an arena.";
-    if (allocated_size() == total_size_) {
-      Reserve(total_size_ + 1);
+    if (allocated_size() == Capacity()) {
+      Reserve(Capacity() + 1);
     }
-    if (using_sso()) {
-      tagged_rep_or_elem_ = value;
+    if (using_sbo_) {
+      element_ = value;
     } else {
-      element_at(rep()->allocated_size++) = value;
+      elements_[allocated_size_++] = value;
     }
   }
 
   template <typename TypeHandler>
   PROTOBUF_NODISCARD typename TypeHandler::Type* ReleaseCleared() {
-    ABSL_DCHECK(GetOwningArena() == nullptr)
+    ABSL_DCHECK(!has_arena_)
         << "ReleaseCleared() can only be used on a RepeatedPtrField not on "
         << "an arena.";
-    ABSL_DCHECK(tagged_rep_or_elem_ != nullptr);
     ABSL_DCHECK_GT(allocated_size(), current_size_);
-    if (using_sso()) {
-      auto* result =
-          reinterpret_cast<typename TypeHandler::Type*>(tagged_rep_or_elem_);
-      tagged_rep_or_elem_ = nullptr;
-      return result;
+    if (using_sbo_) {
+      return cast<TypeHandler>(std::exchange(element_, nullptr));
     } else {
-      return cast<TypeHandler>(element_at(--rep()->allocated_size));
+      return cast<TypeHandler>(elements_[--allocated_size_]);
     }
   }
 
@@ -547,7 +543,7 @@
     Arena* element_arena =
         reinterpret_cast<Arena*>(TypeHandler::GetOwningArena(value));
     Arena* arena = GetOwningArena();
-    if (arena == element_arena && allocated_size() < total_size_) {
+    if (arena == element_arena && allocated_size() < Capacity()) {
       // Fast path: underlying arena representation (tagged pointer) is equal to
       // our arena pointer, and we can add to array without resizing it (at
       // least one slot that is not allocated).
@@ -558,7 +554,7 @@
         elems[allocated_size()] = elems[current_size_];
       }
       elems[ExchangeCurrentSize(current_size_ + 1)] = value;
-      if (!using_sso()) ++rep()->allocated_size;
+      if (!using_sbo_) ++allocated_size_;
     } else {
       AddAllocatedSlowWithCopy<TypeHandler>(value, element_arena, arena);
     }
@@ -569,7 +565,7 @@
       // AddAllocated version that does not implement arena-safe copying
       // behavior.
       typename TypeHandler::Type* value, std::false_type) {
-    if (allocated_size() < total_size_) {
+    if (allocated_size() < Capacity()) {
       // Fast path: underlying arena representation (tagged pointer) is equal to
       // our arena pointer, and we can add to array without resizing it (at
       // least one slot that is not allocated).
@@ -580,7 +576,7 @@
         elems[allocated_size()] = elems[current_size_];
       }
       elems[ExchangeCurrentSize(current_size_ + 1)] = value;
-      if (!using_sso()) ++rep()->allocated_size;
+      if (!using_sbo_) ++allocated_size_;
     } else {
       UnsafeArenaAddAllocated<TypeHandler>(value);
     }
@@ -663,7 +659,9 @@
   inline Arena* GetArena() const { return GetOwningArena(); }
 
  protected:
-  inline Arena* GetOwningArena() const { return arena_; }
+  inline Arena* GetOwningArena() const {
+    return using_sbo_ ? arena_ : rep()->arena;
+  }
 
  private:
   template <typename T> friend class Arena::InternalHelper;
@@ -672,20 +670,6 @@
   using DestructorSkippable_ = void;
 
   static constexpr int kInitialSize = 0;
-  // A few notes on internal representation:
-  //
-  // We use an indirected approach, with struct Rep, to keep
-  // sizeof(RepeatedPtrFieldBase) equivalent to what it was before arena support
-  // was added; namely, 3 8-byte machine words on x86-64. An instance of Rep is
-  // allocated only when the repeated field is non-empty, and it is a
-  // dynamically-sized struct (the header is directly followed by elements[]).
-  // We place arena_ and current_size_ directly in the object to avoid cache
-  // misses due to the indirection, because these fields are checked frequently.
-  // Placing all fields directly in the RepeatedPtrFieldBase instance would cost
-  // significant performance for memory-sensitive workloads.
-  Arena* arena_;
-  int current_size_;
-  int total_size_;
 
   // Replaces current_size_ with new_size and returns the previous value of
   // current_size_. This function is intended to be the only place where
@@ -697,48 +681,41 @@
   }
 
   struct Rep {
-    int allocated_size;
-    // Here we declare a huge array as a way of approximating C's "flexible
-    // array member" feature without relying on undefined behavior.
-    void* elements[(std::numeric_limits<int>::max() - 2 * sizeof(int)) /
-                   sizeof(void*)];
+    union {
+      Arena* arena;
+      void* unused;
+    };
+    void** elements() { return reinterpret_cast<void**>(this + 1); }
+
+    // Avoid 'implicitly deleted dtor' warnings on certain compilers.
+    ~Rep() = delete;
   };
-  static constexpr size_t kRepHeaderSize = offsetof(Rep, elements);
-  void* const* elements() const {
-    return using_sso() ? &tagged_rep_or_elem_ : +rep()->elements;
-  }
-  void** elements() {
-    return using_sso() ? &tagged_rep_or_elem_ : +rep()->elements;
-  }
+  static constexpr size_t kRepHeaderSize = sizeof(Rep);
+
+  void* const* elements() const { return using_sbo_ ? &element_ : elements_; }
+  void** elements() { return using_sbo_ ? &element_ : elements_; }
 
   void*& element_at(int index) {
-    if (using_sso()) {
+    if (using_sbo_) {
       ABSL_DCHECK_EQ(index, 0);
-      return tagged_rep_or_elem_;
+      return element_;
     }
-    return rep()->elements[index];
+    return elements_[index];
   }
   const void* element_at(int index) const {
     return const_cast<RepeatedPtrFieldBase*>(this)->element_at(index);
   }
 
   int allocated_size() const {
-    return using_sso() ? (tagged_rep_or_elem_ != nullptr ? 1 : 0)
-                       : rep()->allocated_size;
+    return using_sbo_ ? (element_ == nullptr ? 0 : 1) : allocated_size_;
   }
-  Rep* rep() {
-    ABSL_DCHECK(!using_sso());
-    return reinterpret_cast<Rep*>(
-        reinterpret_cast<uintptr_t>(tagged_rep_or_elem_) - 1);
+  // Returns a pointer to the Rep struct.
+  // pre-condition: the Rep must have been allocated.
+  Rep* rep() const {
+    ABSL_DCHECK(!using_sbo_);
+    return reinterpret_cast<Rep*>(reinterpret_cast<char*>(elements_) -
+                                  kRepHeaderSize);
   }
-  const Rep* rep() const {
-    return const_cast<RepeatedPtrFieldBase*>(this)->rep();
-  }
-
-  bool using_sso() const {
-    return (reinterpret_cast<uintptr_t>(tagged_rep_or_elem_) & 1) == 0;
-  }
-  void* tagged_rep_or_elem_;
 
   template <typename TypeHandler>
   static inline typename TypeHandler::Type* cast(void* element) {
@@ -780,7 +757,7 @@
                         allocated_elems);
     ExchangeCurrentSize(current_size_ + other_size);
     if (allocated_size() < current_size_) {
-      rep()->allocated_size = current_size_;
+      allocated_size_ = current_size_;
     }
   }
 
@@ -848,7 +825,25 @@
   friend class AccessorHelper;
   template <typename T>
   friend struct google::protobuf::WeakRepeatedPtrField;
+
+  // TODO: TcParser appears to depend on the exact byte offset of
+  // `current_size_` within this class; audit that coupling before reordering.
   friend class internal::TcParser;  // TODO(jorg): Remove this friend.
+
+  union {
+    struct {
+      Arena* arena_;
+      void* element_;
+    };
+    struct {
+      int allocated_size_;
+      int total_size_;
+      void** elements_;
+    };
+  };
+  int current_size_;
+  bool using_sbo_;
+  bool has_arena_;
 };
 
 template <typename GenericType>