diff --git a/CMake/AbseilDll.cmake b/CMake/AbseilDll.cmake
index e318d72..df071af 100644
--- a/CMake/AbseilDll.cmake
+++ b/CMake/AbseilDll.cmake
@@ -126,6 +126,7 @@
   "debugging/symbolize.h"
   "debugging/internal/address_is_readable.cc"
   "debugging/internal/address_is_readable.h"
+  "debugging/internal/addresses.h"
   "debugging/internal/bounded_utf8_length_sequence.h"
   "debugging/internal/decode_rust_punycode.cc"
   "debugging/internal/decode_rust_punycode.h"
diff --git a/absl/container/BUILD.bazel b/absl/container/BUILD.bazel
index f1026a5..37b5f2a 100644
--- a/absl/container/BUILD.bazel
+++ b/absl/container/BUILD.bazel
@@ -659,11 +659,13 @@
     copts = ABSL_DEFAULT_COPTS,
     linkopts = ABSL_DEFAULT_LINKOPTS,
     deps = [
+        ":common_policy_traits",
         ":container_memory",
         ":raw_hash_set",
         "//absl/base:config",
         "//absl/base:core_headers",
         "//absl/base:throw_delegate",
+        "//absl/meta:type_traits",
     ],
 )
 
diff --git a/absl/container/CMakeLists.txt b/absl/container/CMakeLists.txt
index 1419df7..39ff083 100644
--- a/absl/container/CMakeLists.txt
+++ b/absl/container/CMakeLists.txt
@@ -720,9 +720,11 @@
     ${ABSL_DEFAULT_COPTS}
   DEPS
     absl::config
+    absl::common_policy_traits
     absl::container_memory
     absl::core_headers
     absl::raw_hash_set
+    absl::type_traits
     absl::throw_delegate
   PUBLIC
 )
diff --git a/absl/container/btree_benchmark.cc b/absl/container/btree_benchmark.cc
index d0dac37..b1525bd 100644
--- a/absl/container/btree_benchmark.cc
+++ b/absl/container/btree_benchmark.cc
@@ -406,22 +406,18 @@
 template <typename T>
 void BM_FwdIter(benchmark::State& state) {
   using V = typename remove_pair_const<typename T::value_type>::type;
-  using R = typename T::value_type const*;
 
   std::vector<V> values = GenerateValues<V>(kBenchmarkValues);
   T container(values.begin(), values.end());
 
   auto iter = container.end();
 
-  R r = nullptr;
-
   while (state.KeepRunning()) {
     if (iter == container.end()) iter = container.begin();
-    r = &(*iter);
-    ++iter;
+    auto *v = &(*iter);
+    benchmark::DoNotOptimize(v);
+    benchmark::DoNotOptimize(++iter);
   }
-
-  benchmark::DoNotOptimize(r);
 }
 
 // Benchmark random range-construction of a container.
diff --git a/absl/container/flat_hash_map_test.cc b/absl/container/flat_hash_map_test.cc
index 10cddcf..5c83c94 100644
--- a/absl/container/flat_hash_map_test.cc
+++ b/absl/container/flat_hash_map_test.cc
@@ -115,23 +115,29 @@
 }
 
 TEST(FlatHashMap, Relocatability) {
-  static_assert(absl::is_trivially_relocatable<int>::value, "");
+  static_assert(absl::is_trivially_relocatable<int>::value);
+#if ABSL_INTERNAL_CPLUSPLUS_LANG <= 202002L
+  // std::pair is not trivially copyable in C++23 in some standard
+  // library versions.
+  // See https://github.com/llvm/llvm-project/pull/95444 for instance.
+  // container_memory.h contains a workaround, so what really matters
+  // is the transfer test below.
   static_assert(
-      absl::is_trivially_relocatable<std::pair<const int, int>>::value, "");
+      absl::is_trivially_relocatable<std::pair<const int, int>>::value);
+#endif
   static_assert(
       std::is_same<decltype(absl::container_internal::FlatHashMapPolicy<
                             int, int>::transfer<std::allocator<char>>(nullptr,
                                                                       nullptr,
                                                                       nullptr)),
-                   std::true_type>::value,
-      "");
+                   std::true_type>::value);
 
-    struct NonRelocatable {
-      NonRelocatable() = default;
-      NonRelocatable(NonRelocatable&&) {}
-      NonRelocatable& operator=(NonRelocatable&&) { return *this; }
-      void* self = nullptr;
-    };
+  struct NonRelocatable {
+    NonRelocatable() = default;
+    NonRelocatable(NonRelocatable&&) {}
+    NonRelocatable& operator=(NonRelocatable&&) { return *this; }
+    void* self = nullptr;
+  };
 
   EXPECT_FALSE(absl::is_trivially_relocatable<NonRelocatable>::value);
   EXPECT_TRUE(
diff --git a/absl/container/internal/btree.h b/absl/container/internal/btree.h
index a742829..bed6be1 100644
--- a/absl/container/internal/btree.h
+++ b/absl/container/internal/btree.h
@@ -2151,50 +2151,54 @@
 
 template <typename N, typename R, typename P>
 void btree_iterator<N, R, P>::increment_slow() {
-  if (node_->is_leaf()) {
-    assert(position_ >= node_->finish());
-    btree_iterator save(*this);
-    while (position_ == node_->finish() && !node_->is_root()) {
-      assert(node_->parent()->child(node_->position()) == node_);
-      position_ = node_->position();
-      node_ = node_->parent();
+  N* node = node_;
+  int position = position_;
+  if (node->is_leaf()) {
+    assert(position >= node->finish());
+    while (position == node->finish() && !node->is_root()) {
+      assert(node->parent()->child(node->position()) == node);
+      position = node->position();
+      node = node->parent();
     }
     // TODO(ezb): assert we aren't incrementing end() instead of handling.
-    if (position_ == node_->finish()) {
-      *this = save;
+    if (position == node->finish()) {
+      return;
     }
   } else {
-    assert(position_ < node_->finish());
-    node_ = node_->child(static_cast<field_type>(position_ + 1));
-    while (node_->is_internal()) {
-      node_ = node_->start_child();
+    assert(position < node->finish());
+    node = node->child(static_cast<field_type>(position + 1));
+    while (node->is_internal()) {
+      node = node->start_child();
     }
-    position_ = node_->start();
+    position = node->start();
   }
+  *this = {node, position};
 }
 
 template <typename N, typename R, typename P>
 void btree_iterator<N, R, P>::decrement_slow() {
-  if (node_->is_leaf()) {
-    assert(position_ <= -1);
-    btree_iterator save(*this);
-    while (position_ < node_->start() && !node_->is_root()) {
-      assert(node_->parent()->child(node_->position()) == node_);
-      position_ = node_->position() - 1;
-      node_ = node_->parent();
+  N* node = node_;
+  int position = position_;
+  if (node->is_leaf()) {
+    assert(position <= -1);
+    while (position < node->start() && !node->is_root()) {
+      assert(node->parent()->child(node->position()) == node);
+      position = node->position() - 1;
+      node = node->parent();
     }
     // TODO(ezb): assert we aren't decrementing begin() instead of handling.
-    if (position_ < node_->start()) {
-      *this = save;
+    if (position < node->start()) {
+      return;
     }
   } else {
-    assert(position_ >= node_->start());
-    node_ = node_->child(static_cast<field_type>(position_));
-    while (node_->is_internal()) {
-      node_ = node_->child(node_->finish());
+    assert(position >= node->start());
+    node = node->child(static_cast<field_type>(position));
+    while (node->is_internal()) {
+      node = node->child(node->finish());
     }
-    position_ = node_->finish() - 1;
+    position = node->finish() - 1;
   }
+  *this = {node, position};
 }
 
 template <typename N, typename R, typename P>
diff --git a/absl/container/internal/btree_container.h b/absl/container/internal/btree_container.h
index a68ce44..5e2abb2 100644
--- a/absl/container/internal/btree_container.h
+++ b/absl/container/internal/btree_container.h
@@ -18,6 +18,7 @@
 #include <algorithm>
 #include <initializer_list>
 #include <iterator>
+#include <type_traits>
 #include <utility>
 
 #include "absl/base/attributes.h"
@@ -451,6 +452,29 @@
   template <class K>
   using key_arg = typename super_type::template key_arg<K>;
 
+  // NOTE: The mess here is to shorten the code for the (very repetitive)
+  // function overloads, and to allow the lifetime-bound overloads to dispatch
+  // to the non-lifetime-bound overloads, to ensure there is a single source of
+  // truth for each overload set.
+  //
+  // Enabled if an assignment from the given type would require the
+  // source object to remain alive for the life of the element.
+  //
+  // TODO(b/402804213): Remove these traits and simplify the overloads whenever
+  // we have a better mechanism available to handle lifetime analysis.
+  template <class K, bool Value, typename = void>
+  using LifetimeBoundK =
+      HasValue<Value, type_traits_internal::IsLifetimeBoundAssignment<
+                          typename Tree::key_type, K>>;
+  template <class M, bool Value, typename = void>
+  using LifetimeBoundV =
+      HasValue<Value, type_traits_internal::IsLifetimeBoundAssignment<
+                          typename Tree::params_type::mapped_type, M>>;
+  template <class K, bool KValue, class M, bool MValue, typename... Dummy>
+  using LifetimeBoundKV =
+      absl::conjunction<LifetimeBoundK<K, KValue, absl::void_t<Dummy...>>,
+                        LifetimeBoundV<M, MValue>>;
+
  public:
   using key_type = typename Tree::key_type;
   using mapped_type = typename params_type::mapped_type;
@@ -464,85 +488,163 @@
   using super_type::super_type;
   btree_map_container() {}
 
+  // TODO(b/402804213): Remove these macros whenever we have a better mechanism
+  // available to handle lifetime analysis.
+#define ABSL_INTERNAL_X(Func, Callee, KQual, MQual, KValue, MValue, ...)     \
+  template <                                                                 \
+      typename K = key_type, class M,                                        \
+      ABSL_INTERNAL_IF_##KValue##_NOR_##MValue(                              \
+          int = (EnableIf<LifetimeBoundKV<K, KValue, M, MValue,              \
+                                          IfRRef<int KQual>::AddPtr<K>,      \
+                                          IfRRef<int MQual>::AddPtr<M>>>()), \
+          ABSL_INTERNAL_SINGLE_ARG(                                          \
+              int &...,                                                      \
+              decltype(EnableIf<LifetimeBoundKV<K, KValue, M, MValue>>()) =  \
+                  0))>                                                       \
+  decltype(auto) Func(                                                       \
+      __VA_ARGS__ key_arg<K> KQual k ABSL_INTERNAL_IF_##KValue(              \
+          ABSL_INTERNAL_ATTRIBUTE_CAPTURED_BY(this)),                        \
+      M MQual obj ABSL_INTERNAL_IF_##MValue(                                 \
+          ABSL_INTERNAL_ATTRIBUTE_CAPTURED_BY(this)))                        \
+      ABSL_ATTRIBUTE_LIFETIME_BOUND {                                        \
+    return ABSL_INTERNAL_IF_##KValue##_OR_##MValue(                          \
+        (this->template Func<K, M, 0>), Callee)(                             \
+        __VA_ARGS__ std::forward<decltype(k)>(k),                            \
+        std::forward<decltype(obj)>(obj));                                   \
+  }                                                                          \
+  friend struct std::enable_if<false> /* just to force a semicolon */
   // Insertion routines.
   // Note: the nullptr template arguments and extra `const M&` overloads allow
   // for supporting bitfield arguments.
-  template <typename K = key_type, class M>
-  std::pair<iterator, bool> insert_or_assign(const key_arg<K> &k, const M &obj)
-      ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return insert_or_assign_impl(k, obj);
-  }
-  template <typename K = key_type, class M, K * = nullptr>
-  std::pair<iterator, bool> insert_or_assign(key_arg<K> &&k, const M &obj)
-      ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return insert_or_assign_impl(std::forward<K>(k), obj);
-  }
-  template <typename K = key_type, class M, M * = nullptr>
-  std::pair<iterator, bool> insert_or_assign(const key_arg<K> &k, M &&obj)
-      ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return insert_or_assign_impl(k, std::forward<M>(obj));
-  }
-  template <typename K = key_type, class M, K * = nullptr, M * = nullptr>
-  std::pair<iterator, bool> insert_or_assign(key_arg<K> &&k, M &&obj)
-      ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return insert_or_assign_impl(std::forward<K>(k), std::forward<M>(obj));
-  }
-  template <typename K = key_type, class M>
-  iterator insert_or_assign(const_iterator hint, const key_arg<K> &k,
-                            const M &obj) ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return insert_or_assign_hint_impl(hint, k, obj);
-  }
-  template <typename K = key_type, class M, K * = nullptr>
-  iterator insert_or_assign(const_iterator hint, key_arg<K> &&k,
-                            const M &obj) ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return insert_or_assign_hint_impl(hint, std::forward<K>(k), obj);
-  }
-  template <typename K = key_type, class M, M * = nullptr>
-  iterator insert_or_assign(const_iterator hint, const key_arg<K> &k,
-                            M &&obj) ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return insert_or_assign_hint_impl(hint, k, std::forward<M>(obj));
-  }
-  template <typename K = key_type, class M, K * = nullptr, M * = nullptr>
-  iterator insert_or_assign(const_iterator hint, key_arg<K> &&k,
-                            M &&obj) ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return insert_or_assign_hint_impl(hint, std::forward<K>(k),
-                                      std::forward<M>(obj));
-  }
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, const &,
+                  false, false);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, const &,
+                  false, true);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, const &,
+                  true, false);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, const &,
+                  true, true);
 
-  template <typename K = key_type, typename... Args,
-            typename absl::enable_if_t<
-                !std::is_convertible<K, const_iterator>::value, int> = 0>
-  std::pair<iterator, bool> try_emplace(const key_arg<K> &k, Args &&...args)
-      ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return try_emplace_impl(k, std::forward<Args>(args)...);
-  }
-  template <typename K = key_type, typename... Args,
-            typename absl::enable_if_t<
-                !std::is_convertible<K, const_iterator>::value, int> = 0>
-  std::pair<iterator, bool> try_emplace(key_arg<K> &&k, Args &&...args)
-      ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return try_emplace_impl(std::forward<K>(k), std::forward<Args>(args)...);
-  }
-  template <typename K = key_type, typename... Args>
-  iterator try_emplace(const_iterator hint, const key_arg<K> &k,
-                       Args &&...args) ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return try_emplace_hint_impl(hint, k, std::forward<Args>(args)...);
-  }
-  template <typename K = key_type, typename... Args>
-  iterator try_emplace(const_iterator hint, key_arg<K> &&k,
-                       Args &&...args) ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return try_emplace_hint_impl(hint, std::forward<K>(k),
-                                 std::forward<Args>(args)...);
-  }
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, &&, false,
+                  false);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, &&, false,
+                  true);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, &&, true,
+                  false);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, &&, true,
+                  true);
 
-  template <typename K = key_type>
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, const &, false,
+                  false);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, const &, false,
+                  true);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, const &, true,
+                  false);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, const &, true,
+                  true);
+
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, &&, false,
+                  false);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, &&, false, true);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, &&, true, false);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, &&, true, true);
+
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_hint_impl, const &,
+                  const &, false, false,
+                  const_iterator(hint) ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_hint_impl, const &,
+                  const &, false, true,
+                  const_iterator(hint) ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_hint_impl, const &,
+                  const &, true, false,
+                  const_iterator(hint) ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_hint_impl, const &,
+                  const &, true, true,
+                  const_iterator(hint) ABSL_INTERNAL_COMMA);
+
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_hint_impl, const &, &&,
+                  false, false, const_iterator(hint) ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_hint_impl, const &, &&,
+                  false, true, const_iterator(hint) ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_hint_impl, const &, &&,
+                  true, false, const_iterator(hint) ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_hint_impl, const &, &&,
+                  true, true, const_iterator(hint) ABSL_INTERNAL_COMMA);
+
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_hint_impl, &&, const &,
+                  false, false, const_iterator(hint) ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_hint_impl, &&, const &,
+                  false, true, const_iterator(hint) ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_hint_impl, &&, const &,
+                  true, false, const_iterator(hint) ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_hint_impl, &&, const &,
+                  true, true, const_iterator(hint) ABSL_INTERNAL_COMMA);
+
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_hint_impl, &&, &&, false,
+                  false, const_iterator(hint) ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_hint_impl, &&, &&, false,
+                  true, const_iterator(hint) ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_hint_impl, &&, &&, true,
+                  false, const_iterator(hint) ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_hint_impl, &&, &&, true,
+                  true, const_iterator(hint) ABSL_INTERNAL_COMMA);
+#undef ABSL_INTERNAL_X
+
+#define ABSL_INTERNAL_X(Func, Callee, KQual, KValue, ...)                      \
+  template <                                                                   \
+      class K = key_type,                                                      \
+      ABSL_INTERNAL_IF_##KValue(                                               \
+          class... Args,                                                       \
+          int = (EnableIf<                                                     \
+                 LifetimeBoundK<K, KValue, IfRRef<int KQual>::AddPtr<K>>>())), \
+      ABSL_INTERNAL_IF_##KValue(                                               \
+          decltype(EnableIf<LifetimeBoundK<                                    \
+                       K, KValue, IfRRef<int KQual>::AddPtr<K>>>()) = 0,       \
+          class... Args),                                                      \
+      std::enable_if_t<!std::is_convertible<K, const_iterator>::value, int> =  \
+          0>                                                                   \
+  decltype(auto) Func(                                                         \
+      __VA_ARGS__ key_arg<K> KQual k ABSL_INTERNAL_IF_##KValue(                \
+          ABSL_INTERNAL_ATTRIBUTE_CAPTURED_BY(this)),                          \
+      Args &&...args) ABSL_ATTRIBUTE_LIFETIME_BOUND {                          \
+    return ABSL_INTERNAL_IF_##KValue((this->template Func<K, 0>), Callee)(     \
+        __VA_ARGS__ std::forward<decltype(k)>(k),                              \
+        std::forward<decltype(args)>(args)...);                                \
+  }                                                                            \
+  friend struct std::enable_if<false> /* just to force a semicolon */
+  ABSL_INTERNAL_X(try_emplace, try_emplace_impl, const &, false);
+  ABSL_INTERNAL_X(try_emplace, try_emplace_impl, const &, true);
+  ABSL_INTERNAL_X(try_emplace, try_emplace_impl, &&, false);
+  ABSL_INTERNAL_X(try_emplace, try_emplace_impl, &&, true);
+  ABSL_INTERNAL_X(try_emplace, try_emplace_hint_impl, const &, false,
+                  const_iterator(hint) ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(try_emplace, try_emplace_hint_impl, const &, true,
+                  const_iterator(hint) ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(try_emplace, try_emplace_hint_impl, &&, false,
+                  const_iterator(hint) ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(try_emplace, try_emplace_hint_impl, &&, true,
+                  const_iterator(hint) ABSL_INTERNAL_COMMA);
+#undef ABSL_INTERNAL_X
+
+  template <class K = key_type, int = EnableIf<LifetimeBoundK<K, false>>()>
   mapped_type &operator[](const key_arg<K> &k) ABSL_ATTRIBUTE_LIFETIME_BOUND {
     return try_emplace(k).first->second;
   }
-  template <typename K = key_type>
+  template <class K = key_type, int &..., EnableIf<LifetimeBoundK<K, true>> = 0>
+  mapped_type &operator[](
+      const key_arg<K> &k ABSL_INTERNAL_ATTRIBUTE_CAPTURED_BY(this))
+      ABSL_ATTRIBUTE_LIFETIME_BOUND {
+    return this->template operator[]<K, 0>(k);
+  }
+  template <class K = key_type, int = EnableIf<LifetimeBoundK<K, false>>()>
   mapped_type &operator[](key_arg<K> &&k) ABSL_ATTRIBUTE_LIFETIME_BOUND {
     return try_emplace(std::forward<K>(k)).first->second;
   }
+  template <class K = key_type, int &..., EnableIf<LifetimeBoundK<K, true>> = 0>
+  mapped_type &operator[](key_arg<K> &&k ABSL_INTERNAL_ATTRIBUTE_CAPTURED_BY(
+      this)) ABSL_ATTRIBUTE_LIFETIME_BOUND {
+    return this->template operator[]<K, 0>(std::forward<K>(k));
+  }
 
   template <typename K = key_type>
   mapped_type &at(const key_arg<K> &key) ABSL_ATTRIBUTE_LIFETIME_BOUND {
diff --git a/absl/container/internal/common.h b/absl/container/internal/common.h
index 9239bb4..5ef6c56 100644
--- a/absl/container/internal/common.h
+++ b/absl/container/internal/common.h
@@ -21,10 +21,53 @@
 #include "absl/meta/type_traits.h"
 #include "absl/types/optional.h"
 
+// TODO(b/402804213): Clean up these macros when no longer needed.
+#define ABSL_INTERNAL_SINGLE_ARG(...) __VA_ARGS__
+
+#define ABSL_INTERNAL_IF_true(if_satisfied, ...) if_satisfied
+#define ABSL_INTERNAL_IF_false(if_satisfied, ...) __VA_ARGS__
+
+#define ABSL_INTERNAL_IF_true_AND_true ABSL_INTERNAL_IF_true
+#define ABSL_INTERNAL_IF_false_AND_false ABSL_INTERNAL_IF_false
+#define ABSL_INTERNAL_IF_true_AND_false ABSL_INTERNAL_IF_false_AND_false
+#define ABSL_INTERNAL_IF_false_AND_true ABSL_INTERNAL_IF_false_AND_false
+
+#define ABSL_INTERNAL_IF_true_OR_true ABSL_INTERNAL_IF_true
+#define ABSL_INTERNAL_IF_false_OR_false ABSL_INTERNAL_IF_false
+#define ABSL_INTERNAL_IF_true_OR_false ABSL_INTERNAL_IF_true_OR_true
+#define ABSL_INTERNAL_IF_false_OR_true ABSL_INTERNAL_IF_true_OR_true
+
+#define ABSL_INTERNAL_IF_true_NOR_true ABSL_INTERNAL_IF_false_AND_false
+#define ABSL_INTERNAL_IF_false_NOR_false ABSL_INTERNAL_IF_true_AND_true
+#define ABSL_INTERNAL_IF_true_NOR_false ABSL_INTERNAL_IF_false_AND_true
+#define ABSL_INTERNAL_IF_false_NOR_true ABSL_INTERNAL_IF_true_AND_false
+
+#define ABSL_INTERNAL_COMMA ,
+
 namespace absl {
 ABSL_NAMESPACE_BEGIN
 namespace container_internal {
 
+// TODO(b/402804213): Clean up these traits when no longer needed or
+// deduplicate them with absl::functional_internal::EnableIf.
+template <class Cond>
+using EnableIf = std::enable_if_t<Cond::value, int>;
+
+template <bool Value, class T>
+using HasValue = std::conditional_t<Value, T, absl::negation<T>>;
+
+template <class T>
+struct IfRRef {
+  template <class Other>
+  using AddPtr = Other;
+};
+
+template <class T>
+struct IfRRef<T&&> {
+  template <class Other>
+  using AddPtr = Other*;
+};
+
 template <class, class = void>
 struct IsTransparent : std::false_type {};
 template <class T>
diff --git a/absl/container/internal/container_memory.h b/absl/container/internal/container_memory.h
index 8df5c8e..e7ac1db 100644
--- a/absl/container/internal/container_memory.h
+++ b/absl/container/internal/container_memory.h
@@ -433,8 +433,15 @@
   template <class Allocator>
   static auto transfer(Allocator* alloc, slot_type* new_slot,
                        slot_type* old_slot) {
-    auto is_relocatable =
-        typename absl::is_trivially_relocatable<value_type>::type();
+    // This should really just be
+    // typename absl::is_trivially_relocatable<value_type>::type()
+    // but std::pair is not trivially copyable in C++23 in some standard
+    // library versions.
+    // See https://github.com/llvm/llvm-project/pull/95444 for instance.
+    auto is_relocatable = typename std::conjunction<
+        absl::is_trivially_relocatable<typename value_type::first_type>,
+        absl::is_trivially_relocatable<typename value_type::second_type>>::
+        type();
 
     emplace(new_slot);
     if (is_relocatable) {
diff --git a/absl/container/internal/raw_hash_map.h b/absl/container/internal/raw_hash_map.h
index 8ca0849..b42a4f2 100644
--- a/absl/container/internal/raw_hash_map.h
+++ b/absl/container/internal/raw_hash_map.h
@@ -22,8 +22,10 @@
 #include "absl/base/attributes.h"
 #include "absl/base/config.h"
 #include "absl/base/internal/throw_delegate.h"
+#include "absl/container/internal/common_policy_traits.h"
 #include "absl/container/internal/container_memory.h"
 #include "absl/container/internal/raw_hash_set.h"  // IWYU pragma: export
+#include "absl/meta/type_traits.h"
 
 namespace absl {
 ABSL_NAMESPACE_BEGIN
@@ -48,6 +50,31 @@
       typename KeyArg<IsTransparent<Eq>::value && IsTransparent<Hash>::value>::
           template type<K, typename Policy::key_type>;
 
+  // NOTE: The mess here is to shorten the code for the (very repetitive)
+  // function overloads, and to allow the lifetime-bound overloads to dispatch
+  // to the non-lifetime-bound overloads, to ensure there is a single source of
+  // truth for each overload set.
+  //
+  // Enabled if an assignment from the given type would require the
+  // source object to remain alive for the life of the element.
+  //
+  // TODO(b/402804213): Remove these traits and simplify the overloads whenever
+  // we have a better mechanism available to handle lifetime analysis.
+  template <class K, bool Value, typename = void>
+  using LifetimeBoundK = HasValue<
+      Value, std::conditional_t<policy_trait_element_is_owner<Policy>::value,
+                                std::false_type,
+                                type_traits_internal::IsLifetimeBoundAssignment<
+                                    typename Policy::key_type, K>>>;
+  template <class V, bool Value, typename = void>
+  using LifetimeBoundV =
+      HasValue<Value, type_traits_internal::IsLifetimeBoundAssignment<
+                          typename Policy::mapped_type, V>>;
+  template <class K, bool KValue, class V, bool VValue, typename... Dummy>
+  using LifetimeBoundKV =
+      absl::conjunction<LifetimeBoundK<K, KValue, absl::void_t<Dummy...>>,
+                        LifetimeBoundV<V, VValue>>;
+
  public:
   using key_type = typename Policy::key_type;
   using mapped_type = typename Policy::mapped_type;
@@ -71,87 +98,175 @@
   //   union { int n : 1; };
   //   flat_hash_map<int, int> m;
   //   m.insert_or_assign(n, n);
-  template <class K = key_type, class V = mapped_type, K* = nullptr,
-            V* = nullptr>
-  std::pair<iterator, bool> insert_or_assign(key_arg<K>&& k, V&& v)
-      ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return insert_or_assign_impl(std::forward<K>(k), std::forward<V>(v));
-  }
+  //
+  // TODO(b/402804213): Remove these macros whenever we have a better mechanism
+  // available to handle lifetime analysis.
+#define ABSL_INTERNAL_X(Func, Callee, KQual, VQual, KValue, VValue, Tail, ...) \
+  template <                                                                   \
+      typename K = key_type, class V = mapped_type,                            \
+      ABSL_INTERNAL_IF_##KValue##_NOR_##VValue(                                \
+          int = (EnableIf<LifetimeBoundKV<K, KValue, V, VValue,                \
+                                          IfRRef<int KQual>::AddPtr<K>,        \
+                                          IfRRef<int VQual>::AddPtr<V>>>()),   \
+          ABSL_INTERNAL_SINGLE_ARG(                                            \
+              int &...,                                                        \
+              decltype(EnableIf<LifetimeBoundKV<K, KValue, V, VValue>>()) =    \
+                  0))>                                                         \
+  decltype(auto) Func(                                                         \
+      __VA_ARGS__ key_arg<K> KQual k ABSL_INTERNAL_IF_##KValue(                \
+          ABSL_INTERNAL_ATTRIBUTE_CAPTURED_BY(this)),                          \
+      V VQual v ABSL_INTERNAL_IF_##VValue(ABSL_INTERNAL_ATTRIBUTE_CAPTURED_BY( \
+          this))) ABSL_ATTRIBUTE_LIFETIME_BOUND {                              \
+    return ABSL_INTERNAL_IF_##KValue##_OR_##VValue(                            \
+        (this->template Func<K, V, 0>), Callee)(                               \
+        std::forward<decltype(k)>(k), std::forward<decltype(v)>(v)) Tail;      \
+  }                                                                            \
+  static_assert(true, "This is to force a semicolon.")
 
-  template <class K = key_type, class V = mapped_type, K* = nullptr>
-  std::pair<iterator, bool> insert_or_assign(key_arg<K>&& k, const V& v)
-      ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return insert_or_assign_impl(std::forward<K>(k), v);
-  }
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, const &,
+                  false, false, ABSL_INTERNAL_SINGLE_ARG());
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, const &,
+                  false, true, ABSL_INTERNAL_SINGLE_ARG());
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, const &,
+                  true, false, ABSL_INTERNAL_SINGLE_ARG());
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, const &,
+                  true, true, ABSL_INTERNAL_SINGLE_ARG());
 
-  template <class K = key_type, class V = mapped_type, V* = nullptr>
-  std::pair<iterator, bool> insert_or_assign(const key_arg<K>& k, V&& v)
-      ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return insert_or_assign_impl(k, std::forward<V>(v));
-  }
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, &&, false,
+                  false, ABSL_INTERNAL_SINGLE_ARG());
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, &&, false,
+                  true, ABSL_INTERNAL_SINGLE_ARG());
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, &&, true,
+                  false, ABSL_INTERNAL_SINGLE_ARG());
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, &&, true,
+                  true, ABSL_INTERNAL_SINGLE_ARG());
 
-  template <class K = key_type, class V = mapped_type>
-  std::pair<iterator, bool> insert_or_assign(const key_arg<K>& k, const V& v)
-      ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return insert_or_assign_impl(k, v);
-  }
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, const &, false,
+                  false, ABSL_INTERNAL_SINGLE_ARG());
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, const &, false,
+                  true, ABSL_INTERNAL_SINGLE_ARG());
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, const &, true,
+                  false, ABSL_INTERNAL_SINGLE_ARG());
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, const &, true,
+                  true, ABSL_INTERNAL_SINGLE_ARG());
 
-  template <class K = key_type, class V = mapped_type, K* = nullptr,
-            V* = nullptr>
-  iterator insert_or_assign(const_iterator, key_arg<K>&& k,
-                            V&& v) ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return insert_or_assign(std::forward<K>(k), std::forward<V>(v)).first;
-  }
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, &&, false, false,
+                  ABSL_INTERNAL_SINGLE_ARG());
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, &&, false, true,
+                  ABSL_INTERNAL_SINGLE_ARG());
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, &&, true, false,
+                  ABSL_INTERNAL_SINGLE_ARG());
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, &&, true, true,
+                  ABSL_INTERNAL_SINGLE_ARG());
 
-  template <class K = key_type, class V = mapped_type, K* = nullptr>
-  iterator insert_or_assign(const_iterator, key_arg<K>&& k,
-                            const V& v) ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return insert_or_assign(std::forward<K>(k), v).first;
-  }
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, const &,
+                  false, false, .first, const_iterator ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, const &,
+                  false, true, .first, const_iterator ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, const &,
+                  true, false, .first, const_iterator ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, const &,
+                  true, true, .first, const_iterator ABSL_INTERNAL_COMMA);
 
-  template <class K = key_type, class V = mapped_type, V* = nullptr>
-  iterator insert_or_assign(const_iterator, const key_arg<K>& k,
-                            V&& v) ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return insert_or_assign(k, std::forward<V>(v)).first;
-  }
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, &&, false,
+                  false, .first, const_iterator ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, &&, false,
+                  true, .first, const_iterator ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, &&, true,
+                  false, .first, const_iterator ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, const &, &&, true,
+                  true, .first, const_iterator ABSL_INTERNAL_COMMA);
 
-  template <class K = key_type, class V = mapped_type>
-  iterator insert_or_assign(const_iterator, const key_arg<K>& k,
-                            const V& v) ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return insert_or_assign(k, v).first;
-  }
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, const &, false,
+                  false, .first, const_iterator ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, const &, false,
+                  true, .first, const_iterator ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, const &, true,
+                  false, .first, const_iterator ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, const &, true,
+                  true, .first, const_iterator ABSL_INTERNAL_COMMA);
+
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, &&, false, false,
+                  .first, const_iterator ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, &&, false, true,
+                  .first, const_iterator ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, &&, true, false,
+                  .first, const_iterator ABSL_INTERNAL_COMMA);
+  ABSL_INTERNAL_X(insert_or_assign, insert_or_assign_impl, &&, &&, true, true,
+                  .first, const_iterator ABSL_INTERNAL_COMMA);
+#undef ABSL_INTERNAL_X
 
   // All `try_emplace()` overloads make the same guarantees regarding rvalue
   // arguments as `std::unordered_map::try_emplace()`, namely that these
   // functions will not move from rvalue arguments if insertions do not happen.
-  template <class K = key_type, class... Args,
+  template <class K = key_type, int = EnableIf<LifetimeBoundK<K, false, K *>>(),
+            class... Args,
             typename std::enable_if<
-                !std::is_convertible<K, const_iterator>::value, int>::type = 0,
-            K* = nullptr>
-  std::pair<iterator, bool> try_emplace(key_arg<K>&& k, Args&&... args)
+                !std::is_convertible<K, const_iterator>::value, int>::type = 0>
+  std::pair<iterator, bool> try_emplace(key_arg<K> &&k, Args &&...args)
       ABSL_ATTRIBUTE_LIFETIME_BOUND {
     return try_emplace_impl(std::forward<K>(k), std::forward<Args>(args)...);
   }
 
   template <class K = key_type, class... Args,
+            EnableIf<LifetimeBoundK<K, true, K *>> = 0,
             typename std::enable_if<
                 !std::is_convertible<K, const_iterator>::value, int>::type = 0>
-  std::pair<iterator, bool> try_emplace(const key_arg<K>& k, Args&&... args)
+  std::pair<iterator, bool> try_emplace(
+      key_arg<K> &&k ABSL_INTERNAL_ATTRIBUTE_CAPTURED_BY(this),
+      Args &&...args) ABSL_ATTRIBUTE_LIFETIME_BOUND {
+    return this->template try_emplace<K, 0>(std::forward<K>(k),
+                                            std::forward<Args>(args)...);
+  }
+
+  template <class K = key_type, int = EnableIf<LifetimeBoundK<K, false>>(),
+            class... Args,
+            typename std::enable_if<
+                !std::is_convertible<K, const_iterator>::value, int>::type = 0>
+  std::pair<iterator, bool> try_emplace(const key_arg<K> &k, Args &&...args)
       ABSL_ATTRIBUTE_LIFETIME_BOUND {
     return try_emplace_impl(k, std::forward<Args>(args)...);
   }
-
-  template <class K = key_type, class... Args, K* = nullptr>
-  iterator try_emplace(const_iterator, key_arg<K>&& k,
-                       Args&&... args) ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return try_emplace(std::forward<K>(k), std::forward<Args>(args)...).first;
+  template <class K = key_type, class... Args,
+            EnableIf<LifetimeBoundK<K, true>> = 0,
+            typename std::enable_if<
+                !std::is_convertible<K, const_iterator>::value, int>::type = 0>
+  std::pair<iterator, bool> try_emplace(
+      const key_arg<K> &k ABSL_INTERNAL_ATTRIBUTE_CAPTURED_BY(this),
+      Args &&...args) ABSL_ATTRIBUTE_LIFETIME_BOUND {
+    return this->template try_emplace<K, 0>(k, std::forward<Args>(args)...);
   }
 
-  template <class K = key_type, class... Args>
-  iterator try_emplace(const_iterator, const key_arg<K>& k,
-                       Args&&... args) ABSL_ATTRIBUTE_LIFETIME_BOUND {
+  template <class K = key_type, int = EnableIf<LifetimeBoundK<K, false, K *>>(),
+            class... Args>
+  iterator try_emplace(const_iterator, key_arg<K> &&k,
+                       Args &&...args) ABSL_ATTRIBUTE_LIFETIME_BOUND {
+    return try_emplace(std::forward<K>(k), std::forward<Args>(args)...).first;
+  }
+  template <class K = key_type, class... Args,
+            EnableIf<LifetimeBoundK<K, true, K *>> = 0>
+  iterator try_emplace(const_iterator hint,
+                       key_arg<K> &&k ABSL_INTERNAL_ATTRIBUTE_CAPTURED_BY(this),
+                       Args &&...args) ABSL_ATTRIBUTE_LIFETIME_BOUND {
+    return this->template try_emplace<K, 0>(hint, std::forward<K>(k),
+                                            std::forward<Args>(args)...);
+  }
+
+  template <class K = key_type, int = EnableIf<LifetimeBoundK<K, false>>(),
+            class... Args>
+  iterator try_emplace(const_iterator, const key_arg<K> &k,
+                       Args &&...args) ABSL_ATTRIBUTE_LIFETIME_BOUND {
     return try_emplace(k, std::forward<Args>(args)...).first;
   }
+  template <class K = key_type, class... Args,
+            EnableIf<LifetimeBoundK<K, true>> = 0>
+  iterator try_emplace(const_iterator hint,
+                       const key_arg<K> &k
+                           ABSL_INTERNAL_ATTRIBUTE_CAPTURED_BY(this),
+                       Args &&...args) ABSL_ATTRIBUTE_LIFETIME_BOUND {
+    return this->template try_emplace<K, 0>(hint, std::forward<K>(k),
+                                            std::forward<Args>(args)...);
+  }
 
   template <class K = key_type, class P = Policy>
   MappedReference<P> at(const key_arg<K>& key) ABSL_ATTRIBUTE_LIFETIME_BOUND {
@@ -174,8 +289,9 @@
     return Policy::value(&*it);
   }
 
-  template <class K = key_type, class P = Policy, K* = nullptr>
-  MappedReference<P> operator[](key_arg<K>&& key)
+  template <class K = key_type, class P = Policy,
+            int = EnableIf<LifetimeBoundK<K, false, K *>>()>
+  MappedReference<P> operator[](key_arg<K> &&key)
       ABSL_ATTRIBUTE_LIFETIME_BOUND {
     // It is safe to use unchecked_deref here because try_emplace
     // will always return an iterator pointing to a valid item in the table,
@@ -183,15 +299,30 @@
     return Policy::value(
         &this->unchecked_deref(try_emplace(std::forward<K>(key)).first));
   }
+  template <class K = key_type, class P = Policy, int &...,
+            EnableIf<LifetimeBoundK<K, true, K *>> = 0>
+  MappedReference<P> operator[](
+      key_arg<K> &&key ABSL_INTERNAL_ATTRIBUTE_CAPTURED_BY(this))
+      ABSL_ATTRIBUTE_LIFETIME_BOUND {
+    return this->template operator[]<K, P, 0>(std::forward<K>(key));
+  }
 
-  template <class K = key_type, class P = Policy>
-  MappedReference<P> operator[](const key_arg<K>& key)
+  template <class K = key_type, class P = Policy,
+            int = EnableIf<LifetimeBoundK<K, false>>()>
+  MappedReference<P> operator[](const key_arg<K> &key)
       ABSL_ATTRIBUTE_LIFETIME_BOUND {
     // It is safe to use unchecked_deref here because try_emplace
     // will always return an iterator pointing to a valid item in the table,
     // since it inserts if nothing is found for the given key.
     return Policy::value(&this->unchecked_deref(try_emplace(key).first));
   }
+  template <class K = key_type, class P = Policy, int &...,
+            EnableIf<LifetimeBoundK<K, true>> = 0>
+  MappedReference<P> operator[](
+      const key_arg<K> &key ABSL_INTERNAL_ATTRIBUTE_CAPTURED_BY(this))
+      ABSL_ATTRIBUTE_LIFETIME_BOUND {
+    return this->template operator[]<K, P, 0>(key);
+  }
 
  private:
   template <class K, class V>
diff --git a/absl/container/internal/raw_hash_set.cc b/absl/container/internal/raw_hash_set.cc
index 3b894a5..2623763 100644
--- a/absl/container/internal/raw_hash_set.cc
+++ b/absl/container/internal/raw_hash_set.cc
@@ -234,6 +234,11 @@
 
 namespace {
 
+void ResetGrowthLeft(CommonFields& common) {
+  common.growth_info().InitGrowthLeftNoDeleted(
+      CapacityToGrowth(common.capacity()) - common.size());
+}
+
 // Finds guaranteed to exists empty slot from the given position.
 // NOTE: this function is almost never triggered inside of the
 // DropDeletesWithoutResize, so we keep it simple.
@@ -260,8 +265,8 @@
 }
 
 size_t DropDeletesWithoutResizeAndPrepareInsert(CommonFields& common,
-                                                size_t new_hash,
-                                                const PolicyFunctions& policy) {
+                                                const PolicyFunctions& policy,
+                                                size_t new_hash) {
   void* set = &common;
   void* slot_array = common.slot_array();
   const size_t capacity = common.capacity();
@@ -315,13 +320,14 @@
     // If they do, we don't need to move the object as it falls already in the
     // best probe we can.
     const size_t probe_offset = probe(common, hash).offset();
+    const h2_t h2 = H2(hash);
     const auto probe_index = [probe_offset, capacity](size_t pos) {
       return ((pos - probe_offset) & capacity) / Group::kWidth;
     };
 
     // Element doesn't move.
     if (ABSL_PREDICT_TRUE(probe_index(new_i) == probe_index(i))) {
-      SetCtrlInLargeTable(common, i, H2(hash), slot_size);
+      SetCtrlInLargeTable(common, i, h2, slot_size);
       continue;
     }
 
@@ -330,14 +336,14 @@
       // Transfer element to the empty spot.
       // SetCtrl poisons/unpoisons the slots so we have to call it at the
       // right time.
-      SetCtrlInLargeTable(common, new_i, H2(hash), slot_size);
+      SetCtrlInLargeTable(common, new_i, h2, slot_size);
       (*transfer)(set, new_slot_ptr, slot_ptr, 1);
       SetCtrlInLargeTable(common, i, ctrl_t::kEmpty, slot_size);
       // Initialize or change empty space id.
       tmp_space_id = i;
     } else {
       assert(IsDeleted(ctrl[new_i]));
-      SetCtrlInLargeTable(common, new_i, H2(hash), slot_size);
+      SetCtrlInLargeTable(common, new_i, h2, slot_size);
       // Until we are done rehashing, DELETED marks previously FULL slots.
 
       if (tmp_space_id == kUnknownId) {
@@ -409,6 +415,51 @@
   absl::little_endian::Store64(new_ctrl, first_ctrl_bytes);
 }
 
+// Initializes control bytes after SOO to the next capacity.
+// The table must be non-empty SOO.
+ABSL_ATTRIBUTE_ALWAYS_INLINE inline void
+InitializeThreeElementsControlBytesAfterSoo(size_t hash, ctrl_t* new_ctrl) {
+  static constexpr size_t kNewCapacity = NextCapacity(SooCapacity());
+  static_assert(kNewCapacity == 3);
+  static_assert(is_single_group(kNewCapacity));
+  static_assert(SooSlotIndex() == 1);
+
+  static constexpr uint64_t kEmptyXorSentinel =
+      static_cast<uint8_t>(ctrl_t::kEmpty) ^
+      static_cast<uint8_t>(ctrl_t::kSentinel);
+  static constexpr uint64_t kEmpty64 = static_cast<uint8_t>(ctrl_t::kEmpty);
+  static constexpr size_t kMirroredSooSlotIndex =
+      SooSlotIndex() + kNewCapacity + 1;
+  // The first 8 bytes, where present slot positions are replaced with 0.
+  static constexpr uint64_t kFirstCtrlBytesWithZeroes =
+      k8EmptyBytes ^ (kEmpty64 << (8 * SooSlotIndex())) ^
+      (kEmptyXorSentinel << (8 * kNewCapacity)) ^
+      (kEmpty64 << (8 * kMirroredSooSlotIndex));
+
+  const uint64_t h2 = static_cast<uint64_t>(H2(hash));
+  // Fill the original 0th and mirrored 2nd bytes with the hash.
+  // Result will look like:
+  // EHESEHEE
+  // Where H = h2, E = kEmpty, S = kSentinel.
+  const uint64_t first_ctrl_bytes =
+      ((h2 << (8 * SooSlotIndex())) | kFirstCtrlBytesWithZeroes) |
+      (h2 << (8 * kMirroredSooSlotIndex));
+
+  // Fill last bytes with kEmpty.
+  std::memset(new_ctrl + kNewCapacity, static_cast<int8_t>(ctrl_t::kEmpty),
+              Group::kWidth);
+  // Overwrite the first 8 bytes with first_ctrl_bytes.
+  absl::little_endian::Store64(new_ctrl, first_ctrl_bytes);
+
+  // Example for group size 16:
+  // new_ctrl after 1st memset =      ???EEEEEEEEEEEEEEEE
+  // new_ctrl after 2nd store  =      EHESEHEEEEEEEEEEEEE
+
+  // Example for group size 8:
+  // new_ctrl after 1st memset =      ???EEEEEEEE
+  // new_ctrl after 2nd store  =      EHESEHEEEEE
+}
+
 }  // namespace
 
 void EraseMetaOnly(CommonFields& c, size_t index, size_t slot_size) {
@@ -465,9 +516,8 @@
 };
 
 template <ResizeNonSooMode kMode>
-void ResizeNonSooImpl(CommonFields& common, size_t new_capacity,
-                      HashtablezInfoHandle infoz,
-                      const PolicyFunctions& policy) {
+void ResizeNonSooImpl(CommonFields& common, const PolicyFunctions& policy,
+                      size_t new_capacity, HashtablezInfoHandle infoz) {
   assert(IsValidCapacity(new_capacity));
   assert(new_capacity > policy.soo_capacity);
 
@@ -513,9 +563,9 @@
   }
 }
 
-void ResizeEmptyNonAllocatedTableImpl(CommonFields& common, size_t new_capacity,
-                                      bool force_infoz,
-                                      const PolicyFunctions& policy) {
+void ResizeEmptyNonAllocatedTableImpl(CommonFields& common,
+                                      const PolicyFunctions& policy,
+                                      size_t new_capacity, bool force_infoz) {
   assert(IsValidCapacity(new_capacity));
   assert(new_capacity > policy.soo_capacity);
   assert(!force_infoz || policy.soo_capacity > 0);
@@ -529,18 +579,18 @@
     infoz = ForcedTrySample(slot_size, policy.key_size, policy.value_size,
                             policy.soo_capacity);
   }
-  ResizeNonSooImpl<ResizeNonSooMode::kGuaranteedEmpty>(common, new_capacity,
-                                                       infoz, policy);
+  ResizeNonSooImpl<ResizeNonSooMode::kGuaranteedEmpty>(common, policy,
+                                                       new_capacity, infoz);
 }
 
 // If the table was SOO, initializes new control bytes and transfers slot.
 // After transferring the slot, sets control and slots in CommonFields.
 // It is rare to resize an SOO table with one element to a large size.
 // Requires: `c` contains SOO data.
-void InsertOldSooSlotAndInitializeControlBytes(CommonFields& c, size_t hash,
-                                               ctrl_t* new_ctrl,
-                                               void* new_slots,
-                                               const PolicyFunctions& policy) {
+void InsertOldSooSlotAndInitializeControlBytes(CommonFields& c,
+                                               const PolicyFunctions& policy,
+                                               size_t hash, ctrl_t* new_ctrl,
+                                               void* new_slots) {
   assert(c.size() == policy.soo_capacity);
   assert(policy.soo_capacity == SooCapacity());
   size_t new_capacity = c.capacity();
@@ -564,9 +614,9 @@
   kForceSampleNoResizeIfUnsampled,
 };
 
-void ResizeFullSooTable(CommonFields& common, size_t new_capacity,
-                        ResizeFullSooTableSamplingMode sampling_mode,
-                        const PolicyFunctions& policy) {
+void ResizeFullSooTable(CommonFields& common, const PolicyFunctions& policy,
+                        size_t new_capacity,
+                        ResizeFullSooTableSamplingMode sampling_mode) {
   assert(common.capacity() == policy.soo_capacity);
   assert(common.size() == policy.soo_capacity);
   assert(policy.soo_capacity == SooCapacity());
@@ -606,8 +656,8 @@
   const size_t soo_slot_hash =
       policy.hash_slot(policy.hash_fn(common), common.soo_data());
 
-  InsertOldSooSlotAndInitializeControlBytes(common, soo_slot_hash, new_ctrl,
-                                            new_slots, policy);
+  InsertOldSooSlotAndInitializeControlBytes(common, policy, soo_slot_hash,
+                                            new_ctrl, new_slots);
   ResetGrowthLeft(common);
   if (has_infoz) {
     common.set_has_infoz();
@@ -739,8 +789,9 @@
   // new_ctrl after 2nd store =  E0123456EEEEEEESE0123456EEEEEEE
 }
 
-size_t GrowToNextCapacityAndPrepareInsert(CommonFields& common, size_t new_hash,
-                                          const PolicyFunctions& policy) {
+size_t GrowToNextCapacityAndPrepareInsert(CommonFields& common,
+                                          const PolicyFunctions& policy,
+                                          size_t new_hash) {
   assert(common.growth_left() == 0);
   const size_t old_capacity = common.capacity();
   assert(old_capacity == 0 || old_capacity > policy.soo_capacity);
@@ -835,8 +886,9 @@
 // Called whenever the table needs to vacate empty slots either by removing
 // tombstones via rehash or growth to next capacity.
 ABSL_ATTRIBUTE_NOINLINE
-size_t RehashOrGrowToNextCapacityAndPrepareInsert(
-    CommonFields& common, size_t new_hash, const PolicyFunctions& policy) {
+size_t RehashOrGrowToNextCapacityAndPrepareInsert(CommonFields& common,
+                                                  const PolicyFunctions& policy,
+                                                  size_t new_hash) {
   const size_t cap = common.capacity();
   ABSL_ASSUME(cap > 0);
   if (cap > Group::kWidth &&
@@ -883,27 +935,27 @@
     //  762 | 149836       0.37        13 | 148559       0.74       190
     //  807 | 149736       0.39        14 | 151107       0.39        14
     //  852 | 150204       0.42        15 | 151019       0.42        15
-    return DropDeletesWithoutResizeAndPrepareInsert(common, new_hash, policy);
+    return DropDeletesWithoutResizeAndPrepareInsert(common, policy, new_hash);
   } else {
     // Otherwise grow the container.
-    return GrowToNextCapacityAndPrepareInsert(common, new_hash, policy);
+    return GrowToNextCapacityAndPrepareInsert(common, policy, new_hash);
   }
 }
 
 // Slow path for PrepareInsertNonSoo that is called when the table has deleted
 // slots or need to be resized or rehashed.
-size_t PrepareInsertNonSooSlow(CommonFields& common, size_t hash,
-                               const PolicyFunctions& policy) {
+size_t PrepareInsertNonSooSlow(CommonFields& common,
+                               const PolicyFunctions& policy, size_t hash) {
   const GrowthInfo growth_info = common.growth_info();
   assert(!growth_info.HasNoDeletedAndGrowthLeft());
   if (ABSL_PREDICT_TRUE(growth_info.HasNoGrowthLeftAndNoDeleted())) {
     // Table without deleted slots (>95% cases) that needs to be resized.
     assert(growth_info.HasNoDeleted() && growth_info.GetGrowthLeft() == 0);
-    return GrowToNextCapacityAndPrepareInsert(common, hash, policy);
+    return GrowToNextCapacityAndPrepareInsert(common, policy, hash);
   }
   if (ABSL_PREDICT_FALSE(growth_info.HasNoGrowthLeftAssumingMayHaveDeleted())) {
     // Table with deleted slots that needs to be rehashed or resized.
-    return RehashOrGrowToNextCapacityAndPrepareInsert(common, hash, policy);
+    return RehashOrGrowToNextCapacityAndPrepareInsert(common, policy, hash);
   }
   // Table with deleted slots that has space for the inserting element.
   FindInfo target = find_first_non_full(common, hash);
@@ -925,20 +977,19 @@
 }
 
 void ResizeAllocatedTableWithSeedChange(CommonFields& common,
-                                        size_t new_capacity,
-                                        const PolicyFunctions& policy) {
+                                        const PolicyFunctions& policy,
+                                        size_t new_capacity) {
   ResizeNonSooImpl<ResizeNonSooMode::kGuaranteedAllocated>(
-      common, new_capacity, common.infoz(), policy);
+      common, policy, new_capacity, common.infoz());
 }
 
-
 void ReserveEmptyNonAllocatedTableToFitNewSize(CommonFields& common,
-                                               size_t new_size,
-                                               const PolicyFunctions& policy) {
+                                               const PolicyFunctions& policy,
+                                               size_t new_size) {
   ValidateMaxSize(new_size, policy.slot_size);
   ResizeEmptyNonAllocatedTableImpl(
-      common, NormalizeCapacity(GrowthToLowerboundCapacity(new_size)),
-      /*force_infoz=*/false, policy);
+      common, policy, NormalizeCapacity(GrowthToLowerboundCapacity(new_size)),
+      /*force_infoz=*/false);
   // This is after resize, to ensure that we have completed the allocation
   // and have potentially sampled the hashtable.
   common.infoz().RecordReservation(new_size);
@@ -947,17 +998,75 @@
 }
 
 void ReserveEmptyNonAllocatedTableToFitBucketCount(
-    CommonFields& common, size_t bucket_count, const PolicyFunctions& policy) {
+    CommonFields& common, const PolicyFunctions& policy, size_t bucket_count) {
   size_t new_capacity = NormalizeCapacity(bucket_count);
   ValidateMaxSize(CapacityToGrowth(new_capacity), policy.slot_size);
-  ResizeEmptyNonAllocatedTableImpl(common, new_capacity,
-                                   /*force_infoz=*/false, policy);
+  ResizeEmptyNonAllocatedTableImpl(common, policy, new_capacity,
+                                   /*force_infoz=*/false);
 }
 
 void GrowEmptySooTableToNextCapacityForceSampling(
     CommonFields& common, const PolicyFunctions& policy) {
-  ResizeEmptyNonAllocatedTableImpl(common, NextCapacity(SooCapacity()),
-                                   /*force_infoz=*/true, policy);
+  ResizeEmptyNonAllocatedTableImpl(common, policy, NextCapacity(SooCapacity()),
+                                   /*force_infoz=*/true);
+}
+
+// Resizes a full SOO table to the NextCapacity(SooCapacity()).
+template <size_t SooSlotMemcpySize, bool TransferUsesMemcpy>
+void GrowFullSooTableToNextCapacity(CommonFields& common,
+                                    const PolicyFunctions& policy,
+                                    size_t soo_slot_hash) {
+  assert(common.capacity() == policy.soo_capacity);
+  assert(common.size() == policy.soo_capacity);
+  static constexpr size_t kNewCapacity = NextCapacity(SooCapacity());
+  assert(kNewCapacity > policy.soo_capacity);
+  assert(policy.soo_capacity == SooCapacity());
+  const size_t slot_size = policy.slot_size;
+  const size_t slot_align = policy.slot_align;
+  common.set_capacity(kNewCapacity);
+
+  // Since the table is not empty, it will not be sampled.
+  // The decision to sample was already made during the first insertion.
+  RawHashSetLayout layout(kNewCapacity, slot_size, slot_align,
+                          /*has_infoz=*/false);
+  void* alloc = policy.get_char_alloc(common);
+  char* mem = static_cast<char*>(policy.alloc(alloc, layout.alloc_size()));
+  const GenerationType old_generation = common.generation();
+  common.set_generation_ptr(
+      reinterpret_cast<GenerationType*>(mem + layout.generation_offset()));
+  common.set_generation(NextGeneration(old_generation));
+
+  // We do not set control and slots in CommonFields yet to avoid overriding
+  // SOO data.
+  ctrl_t* new_ctrl = reinterpret_cast<ctrl_t*>(mem + layout.control_offset());
+  void* new_slots = mem + layout.slot_offset();
+
+  InitializeThreeElementsControlBytesAfterSoo(soo_slot_hash, new_ctrl);
+
+  SanitizerPoisonMemoryRegion(new_slots, slot_size * kNewCapacity);
+  void* target_slot = SlotAddress(new_slots, SooSlotIndex(), slot_size);
+  SanitizerUnpoisonMemoryRegion(target_slot, slot_size);
+  if constexpr (TransferUsesMemcpy) {
+    // Target slot is placed at index 1, but capacity is at
+    // minimum 3. So we are allowed to copy at least twice as much
+    // memory.
+    static_assert(SooSlotIndex() == 1);
+    static_assert(SooSlotMemcpySize > 0);
+    static_assert(SooSlotMemcpySize <= MaxSooSlotSize());
+    assert(SooSlotMemcpySize <= 2 * slot_size);
+    assert(SooSlotMemcpySize >= slot_size);
+    void* next_slot = SlotAddress(target_slot, 1, slot_size);
+    SanitizerUnpoisonMemoryRegion(next_slot, SooSlotMemcpySize - slot_size);
+    std::memcpy(target_slot, common.soo_data(), SooSlotMemcpySize);
+    SanitizerPoisonMemoryRegion(next_slot, SooSlotMemcpySize - slot_size);
+  } else {
+    static_assert(SooSlotMemcpySize == 0);
+    policy.transfer(&common, target_slot, common.soo_data(), 1);
+  }
+  common.set_control</*kGenerateSeed=*/true>(new_ctrl);
+  common.set_slots(new_slots);
+
+  ResetGrowthLeft(common);
 }
 
 void GrowFullSooTableToNextCapacityForceSampling(
@@ -966,11 +1075,11 @@
   assert(common.size() == policy.soo_capacity);
   assert(policy.soo_capacity == SooCapacity());
   ResizeFullSooTable(
-      common, NextCapacity(SooCapacity()),
-      ResizeFullSooTableSamplingMode::kForceSampleNoResizeIfUnsampled, policy);
+      common, policy, NextCapacity(SooCapacity()),
+      ResizeFullSooTableSamplingMode::kForceSampleNoResizeIfUnsampled);
 }
 
-void Rehash(CommonFields& common, size_t n, const PolicyFunctions& policy) {
+void Rehash(CommonFields& common, const PolicyFunctions& policy, size_t n) {
   const size_t cap = common.capacity();
 
   auto clear_backing_array = [&]() {
@@ -992,8 +1101,8 @@
         static constexpr size_t kInitialSampledCapacity =
             NextCapacity(SooCapacity());
         if (cap > kInitialSampledCapacity) {
-          ResizeAllocatedTableWithSeedChange(common, kInitialSampledCapacity,
-                                             policy);
+          ResizeAllocatedTableWithSeedChange(common, policy,
+                                             kInitialSampledCapacity);
         }
         // This asserts that we didn't lose sampling coverage in `resize`.
         assert(common.infoz().IsSampled());
@@ -1022,14 +1131,14 @@
   if (n == 0 || new_capacity > cap) {
     if (cap == policy.soo_capacity) {
       if (common.empty()) {
-        ResizeEmptyNonAllocatedTableImpl(common, new_capacity,
-                                         /*force_infoz=*/false, policy);
+        ResizeEmptyNonAllocatedTableImpl(common, policy, new_capacity,
+                                         /*force_infoz=*/false);
       } else {
-        ResizeFullSooTable(common, new_capacity,
-                           ResizeFullSooTableSamplingMode::kNoSampling, policy);
+        ResizeFullSooTable(common, policy, new_capacity,
+                           ResizeFullSooTableSamplingMode::kNoSampling);
       }
     } else {
-      ResizeAllocatedTableWithSeedChange(common, new_capacity, policy);
+      ResizeAllocatedTableWithSeedChange(common, policy, new_capacity);
     }
     // This is after resize, to ensure that we have completed the allocation
     // and have potentially sampled the hashtable.
@@ -1037,8 +1146,8 @@
   }
 }
 
-void ReserveAllocatedTable(CommonFields& common, size_t n,
-                           const PolicyFunctions& policy) {
+void ReserveAllocatedTable(CommonFields& common, const PolicyFunctions& policy,
+                           size_t n) {
   common.reset_reserved_growth(n);
   common.set_reservation_size(n);
 
@@ -1055,17 +1164,17 @@
   const size_t new_capacity = NormalizeCapacity(GrowthToLowerboundCapacity(n));
   if (cap == policy.soo_capacity) {
     assert(!common.empty());
-    ResizeFullSooTable(common, new_capacity,
-                       ResizeFullSooTableSamplingMode::kNoSampling, policy);
+    ResizeFullSooTable(common, policy, new_capacity,
+                       ResizeFullSooTableSamplingMode::kNoSampling);
   } else {
     assert(cap > policy.soo_capacity);
-    ResizeAllocatedTableWithSeedChange(common, new_capacity, policy);
+    ResizeAllocatedTableWithSeedChange(common, policy, new_capacity);
   }
   common.infoz().RecordReservation(n);
 }
 
-size_t PrepareInsertNonSoo(CommonFields& common, size_t hash,
-                           const PolicyFunctions& policy, FindInfo target) {
+size_t PrepareInsertNonSoo(CommonFields& common, const PolicyFunctions& policy,
+                           size_t hash, FindInfo target) {
   const bool rehash_for_bug_detection =
       common.should_rehash_for_bug_detection_on_insert() &&
       // Required to allow use of ResizeAllocatedTable.
@@ -1074,7 +1183,7 @@
     // Move to a different heap allocation in order to detect bugs.
     const size_t cap = common.capacity();
     ResizeAllocatedTableWithSeedChange(
-        common, common.growth_left() > 0 ? cap : NextCapacity(cap), policy);
+        common, policy, common.growth_left() > 0 ? cap : NextCapacity(cap));
     target = find_first_non_full(common, hash);
   }
 
@@ -1083,7 +1192,7 @@
   // and growth_left is positive, we can insert at the first
   // empty slot in the probe sequence (target).
   if (ABSL_PREDICT_FALSE(!growth_info.HasNoDeletedAndGrowthLeft())) {
-    return PrepareInsertNonSooSlow(common, hash, policy);
+    return PrepareInsertNonSooSlow(common, policy, hash);
   }
   PrepareInsertCommon(common);
   common.growth_info().OverwriteEmptyAsFull();
@@ -1092,17 +1201,58 @@
   return target.offset;
 }
 
-template void GrowFullSooTableToNextCapacity<0, false>(CommonFields&, size_t,
-                                                       const PolicyFunctions&);
-template void GrowFullSooTableToNextCapacity<1, true>(CommonFields&, size_t,
-                                                      const PolicyFunctions&);
-template void GrowFullSooTableToNextCapacity<4, true>(CommonFields&, size_t,
-                                                      const PolicyFunctions&);
-template void GrowFullSooTableToNextCapacity<8, true>(CommonFields&, size_t,
-                                                      const PolicyFunctions&);
-#if UINTPTR_MAX == UINT64_MAX
-template void GrowFullSooTableToNextCapacity<16, true>(CommonFields&, size_t,
-                                                       const PolicyFunctions&);
+namespace {
+// Returns true if both of the following hold:
+// 1. OptimalMemcpySizeForSooSlotTransfer(left) >
+//    OptimalMemcpySizeForSooSlotTransfer(left - 1)
+// 2. OptimalMemcpySizeForSooSlotTransfer(i) is the same for all i in [left,
+// right].
+// This function is used to verify that we have all the possible template
+// instantiations for GrowFullSooTableToNextCapacity.
+// With this verification the problem may be detected at compile time instead of
+// link time.
+constexpr bool VerifyOptimalMemcpySizeForSooSlotTransferRange(size_t left,
+                                                              size_t right) {
+  size_t optimal_size_for_range = OptimalMemcpySizeForSooSlotTransfer(left);
+  if (optimal_size_for_range <= OptimalMemcpySizeForSooSlotTransfer(left - 1)) {
+    return false;
+  }
+  for (size_t i = left + 1; i <= right; ++i) {
+    if (OptimalMemcpySizeForSooSlotTransfer(i) != optimal_size_for_range) {
+      return false;
+    }
+  }
+  return true;
+}
+}  // namespace
+
+// We need to instantiate ALL possible template combinations because we define
+// the function in the cc file.
+template void GrowFullSooTableToNextCapacity<0, false>(CommonFields&,
+                                                       const PolicyFunctions&,
+                                                       size_t);
+template void
+GrowFullSooTableToNextCapacity<OptimalMemcpySizeForSooSlotTransfer(1), true>(
+    CommonFields&, const PolicyFunctions&, size_t);
+
+static_assert(VerifyOptimalMemcpySizeForSooSlotTransferRange(2, 3));
+template void
+GrowFullSooTableToNextCapacity<OptimalMemcpySizeForSooSlotTransfer(3), true>(
+    CommonFields&, const PolicyFunctions&, size_t);
+
+static_assert(VerifyOptimalMemcpySizeForSooSlotTransferRange(4, 8));
+template void
+GrowFullSooTableToNextCapacity<OptimalMemcpySizeForSooSlotTransfer(8), true>(
+    CommonFields&, const PolicyFunctions&, size_t);
+
+#if UINTPTR_MAX == UINT32_MAX
+static_assert(MaxSooSlotSize() == 8);
+#else
+static_assert(VerifyOptimalMemcpySizeForSooSlotTransferRange(9, 16));
+template void
+GrowFullSooTableToNextCapacity<OptimalMemcpySizeForSooSlotTransfer(16), true>(
+    CommonFields&, const PolicyFunctions&, size_t);
+static_assert(MaxSooSlotSize() == 16);
 #endif
 
 }  // namespace container_internal
diff --git a/absl/container/internal/raw_hash_set.h b/absl/container/internal/raw_hash_set.h
index 599718b..3f4646f 100644
--- a/absl/container/internal/raw_hash_set.h
+++ b/absl/container/internal/raw_hash_set.h
@@ -520,8 +520,8 @@
   static constexpr size_t kSizeShift = 64 - kSizeBitCount;
   static constexpr uint64_t kSizeOneNoMetadata = uint64_t{1} << kSizeShift;
   static constexpr uint64_t kMetadataMask = kSizeOneNoMetadata - 1;
-  static constexpr size_t kSeedMask =
-      (size_t{1} << PerTableSeed::kBitCount) - 1;
+  static constexpr uint64_t kSeedMask =
+      (uint64_t{1} << PerTableSeed::kBitCount) - 1;
   // The next bit after the seed.
   static constexpr uint64_t kHasInfozMask = kSeedMask + 1;
   uint64_t data_;
@@ -1431,11 +1431,13 @@
 // represent a real slot. This is important to take into account on
 // `find_first_non_full()`, where we never try
 // `ShouldInsertBackwards()` for small tables.
-inline bool is_small(size_t capacity) { return capacity < Group::kWidth - 1; }
+constexpr bool is_small(size_t capacity) {
+  return capacity < Group::kWidth - 1;
+}
 
 // Whether a table fits entirely into a probing group.
 // Arbitrary order of elements in such tables is correct.
-inline bool is_single_group(size_t capacity) {
+constexpr bool is_single_group(size_t capacity) {
   return capacity <= Group::kWidth;
 }
 
@@ -1486,11 +1488,6 @@
 // performance critical routines.
 FindInfo find_first_non_full_outofline(const CommonFields&, size_t);
 
-inline void ResetGrowthLeft(CommonFields& common) {
-  common.growth_info().InitGrowthLeftNoDeleted(
-      CapacityToGrowth(common.capacity()) - common.size());
-}
-
 // Sets `ctrl` to `{kEmpty, kSentinel, ..., kEmpty}`, marking the entire
 // array as marked as empty.
 inline void ResetCtrl(CommonFields& common, size_t slot_size) {
@@ -1739,13 +1736,13 @@
 //   3. `new_size > policy.soo_capacity`.
 // The table will be attempted to be sampled.
 void ReserveEmptyNonAllocatedTableToFitNewSize(CommonFields& common,
-                                               size_t new_size,
-                                               const PolicyFunctions& policy);
+                                               const PolicyFunctions& policy,
+                                               size_t new_size);
 
 // The same as ReserveEmptyNonAllocatedTableToFitNewSize, but resizes to the
 // next valid capacity after `bucket_count`.
 void ReserveEmptyNonAllocatedTableToFitBucketCount(
-    CommonFields& common, size_t bucket_count, const PolicyFunctions& policy);
+    CommonFields& common, const PolicyFunctions& policy, size_t bucket_count);
 
 // Resizes empty non-allocated SOO table to NextCapacity(SooCapacity()) and
 // forces the table to be sampled.
@@ -1757,7 +1754,7 @@
     CommonFields& common, const PolicyFunctions& policy);
 
 // Type erased version of raw_hash_set::rehash.
-void Rehash(CommonFields& common, size_t n, const PolicyFunctions& policy);
+void Rehash(CommonFields& common, const PolicyFunctions& policy, size_t n);
 
 // Type erased version of raw_hash_set::reserve for tables that have an
 // allocated backing array.
@@ -1765,53 +1762,8 @@
 // Requires:
 //   1. `c.capacity() > policy.soo_capacity` OR `!c.empty()`.
 // Reserving already allocated tables is considered to be a rare case.
-void ReserveAllocatedTable(CommonFields& common, size_t n,
-                           const PolicyFunctions& policy);
-
-// Initializes control bytes after SOO to the next capacity.
-// The table must be non-empty SOO.
-ABSL_ATTRIBUTE_ALWAYS_INLINE inline void
-InitializeThreeElementsControlBytesAfterSoo(size_t hash, ctrl_t* new_ctrl) {
-  static constexpr size_t kNewCapacity = NextCapacity(SooCapacity());
-  static_assert(kNewCapacity == 3);
-  ABSL_SWISSTABLE_ASSERT(is_single_group(kNewCapacity));
-  static_assert(SooSlotIndex() == 1, "");
-
-  static constexpr uint64_t kEmptyXorSentinel =
-      static_cast<uint8_t>(ctrl_t::kEmpty) ^
-      static_cast<uint8_t>(ctrl_t::kSentinel);
-  static constexpr uint64_t kEmpty64 = static_cast<uint8_t>(ctrl_t::kEmpty);
-  static constexpr size_t kMirroredSooSlotIndex =
-      SooSlotIndex() + kNewCapacity + 1;
-  // The first 8 bytes, where present slot positions are replaced with 0.
-  static constexpr uint64_t kFirstCtrlBytesWithZeroes =
-      k8EmptyBytes ^ (kEmpty64 << (8 * SooSlotIndex())) ^
-      (kEmptyXorSentinel << (8 * kNewCapacity)) ^
-      (kEmpty64 << (8 * kMirroredSooSlotIndex));
-
-  const uint64_t h2 = static_cast<uint64_t>(H2(hash));
-  // Fill the original 0th and mirrored 2nd bytes with the hash.
-  // Result will look like:
-  // EHESEHEE
-  // Where H = h2, E = kEmpty, S = kSentinel.
-  const uint64_t first_ctrl_bytes =
-      ((h2 << (8 * SooSlotIndex())) | kFirstCtrlBytesWithZeroes) |
-      (h2 << (8 * kMirroredSooSlotIndex));
-
-  // Fill last bytes with kEmpty.
-  std::memset(new_ctrl + kNewCapacity, static_cast<int8_t>(ctrl_t::kEmpty),
-              Group::kWidth);
-  // Overwrite the first 8 bytes with first_ctrl_bytes.
-  absl::little_endian::Store64(new_ctrl, first_ctrl_bytes);
-
-  // Example for group size 16:
-  // new_ctrl after 1st memset =      ???EEEEEEEEEEEEEEEE
-  // new_ctrl after 2nd store  =      EHESEHEEEEEEEEEEEEE
-
-  // Example for group size 8:
-  // new_ctrl after 1st memset =      ???EEEEEEEE
-  // new_ctrl after 2nd store  =      EHESEHEEEEE
-}
+void ReserveAllocatedTable(CommonFields& common, const PolicyFunctions& policy,
+                           size_t n);
 
 // Returns the optimal size for memcpy when transferring SOO slot.
 // Otherwise, returns the optimal size for memcpy SOO slot transfer
@@ -1849,61 +1801,12 @@
 }
 
 // Resizes a full SOO table to the NextCapacity(SooCapacity()).
+// All possible template instantiations are defined in the .cc file to improve
+// compilation time.
 template <size_t SooSlotMemcpySize, bool TransferUsesMemcpy>
-void GrowFullSooTableToNextCapacity(CommonFields& common, size_t soo_slot_hash,
-                                    const PolicyFunctions& policy) {
-  ABSL_SWISSTABLE_ASSERT(common.capacity() == policy.soo_capacity);
-  ABSL_SWISSTABLE_ASSERT(common.size() == policy.soo_capacity);
-  static constexpr size_t kNewCapacity = NextCapacity(SooCapacity());
-  ABSL_SWISSTABLE_ASSERT(kNewCapacity > policy.soo_capacity);
-  ABSL_SWISSTABLE_ASSERT(policy.soo_capacity == SooCapacity());
-  const size_t slot_size = policy.slot_size;
-  const size_t slot_align = policy.slot_align;
-  common.set_capacity(kNewCapacity);
-
-  // Since the table is not empty, it will not be sampled.
-  // The decision to sample was already made during the first insertion.
-  RawHashSetLayout layout(kNewCapacity, slot_size, slot_align,
-                          /*has_infoz=*/false);
-  void* alloc = policy.get_char_alloc(common);
-  char* mem = static_cast<char*>(policy.alloc(alloc, layout.alloc_size()));
-  const GenerationType old_generation = common.generation();
-  common.set_generation_ptr(
-      reinterpret_cast<GenerationType*>(mem + layout.generation_offset()));
-  common.set_generation(NextGeneration(old_generation));
-
-  // We do not set control and slots in CommonFields yet to avoid overriding
-  // SOO data.
-  ctrl_t* new_ctrl = reinterpret_cast<ctrl_t*>(mem + layout.control_offset());
-  void* new_slots = mem + layout.slot_offset();
-
-  InitializeThreeElementsControlBytesAfterSoo(soo_slot_hash, new_ctrl);
-
-  SanitizerPoisonMemoryRegion(new_slots, slot_size * kNewCapacity);
-  void* target_slot = SlotAddress(new_slots, SooSlotIndex(), slot_size);
-  SanitizerUnpoisonMemoryRegion(target_slot, slot_size);
-  if constexpr (TransferUsesMemcpy) {
-    // Target slot is placed at index 1, but capacity is at
-    // minimum 3. So we are allowed to copy at least twice as much
-    // memory.
-    static_assert(SooSlotIndex() == 1);
-    static_assert(SooSlotMemcpySize > 0);
-    static_assert(SooSlotMemcpySize <= MaxSooSlotSize());
-    ABSL_SWISSTABLE_ASSERT(SooSlotMemcpySize <= 2 * slot_size);
-    ABSL_SWISSTABLE_ASSERT(SooSlotMemcpySize >= slot_size);
-    void* next_slot = SlotAddress(target_slot, 1, slot_size);
-    SanitizerUnpoisonMemoryRegion(next_slot, SooSlotMemcpySize - slot_size);
-    std::memcpy(target_slot, common.soo_data(), SooSlotMemcpySize);
-    SanitizerPoisonMemoryRegion(next_slot, SooSlotMemcpySize - slot_size);
-  } else {
-    static_assert(SooSlotMemcpySize == 0);
-    policy.transfer(&common, target_slot, common.soo_data(), 1);
-  }
-  common.set_control</*kGenerateSeed=*/true>(new_ctrl);
-  common.set_slots(new_slots);
-
-  ResetGrowthLeft(common);
-}
+void GrowFullSooTableToNextCapacity(CommonFields& common,
+                                    const PolicyFunctions& policy,
+                                    size_t soo_slot_hash);
 
 // As `ResizeFullSooTableToNextCapacity`, except that we also force the SOO
 // table to be sampled. SOO tables need to switch from SOO to heap in order to
@@ -1915,8 +1818,8 @@
 // Tables with SOO enabled must have capacity > policy.soo_capacity.
 // No sampling will be performed since table is already allocated.
 void ResizeAllocatedTableWithSeedChange(CommonFields& common,
-                                        size_t new_capacity,
-                                        const PolicyFunctions& policy);
+                                        const PolicyFunctions& policy,
+                                        size_t new_capacity);
 
 inline void PrepareInsertCommon(CommonFields& common) {
   common.increment_size();
@@ -1968,8 +1871,8 @@
 // REQUIRES: Table is not SOO.
 // REQUIRES: At least one non-full slot available.
 // REQUIRES: `target` is a valid empty position to insert.
-size_t PrepareInsertNonSoo(CommonFields& common, size_t hash,
-                           const PolicyFunctions& policy, FindInfo target);
+size_t PrepareInsertNonSoo(CommonFields& common, const PolicyFunctions& policy,
+                           size_t hash, FindInfo target);
 
 // A SwissTable.
 //
@@ -2280,8 +2183,8 @@
       : settings_(CommonFields::CreateDefault<SooEnabled()>(), hash, eq,
                   alloc) {
     if (bucket_count > DefaultCapacity()) {
-      ReserveEmptyNonAllocatedTableToFitBucketCount(common(), bucket_count,
-                                                    GetPolicyFunctions());
+      ReserveEmptyNonAllocatedTableToFitBucketCount(
+          common(), GetPolicyFunctions(), bucket_count);
     }
   }
 
@@ -2592,15 +2495,15 @@
   //   flat_hash_map<std::string, int> m;
   //   m.insert(std::make_pair("abc", 42));
   template <class T,
-            std::enable_if_t<IsDecomposableAndInsertable<T>::value &&
-                                 IsNotBitField<T>::value &&
-                                 !IsLifetimeBoundAssignmentFrom<T>::value,
-                             int> = 0>
+            int = std::enable_if_t<IsDecomposableAndInsertable<T>::value &&
+                                       IsNotBitField<T>::value &&
+                                       !IsLifetimeBoundAssignmentFrom<T>::value,
+                                   int>()>
   std::pair<iterator, bool> insert(T&& value) ABSL_ATTRIBUTE_LIFETIME_BOUND {
     return emplace(std::forward<T>(value));
   }
 
-  template <class T,
+  template <class T, int&...,
             std::enable_if_t<IsDecomposableAndInsertable<T>::value &&
                                  IsNotBitField<T>::value &&
                                  IsLifetimeBoundAssignmentFrom<T>::value,
@@ -2608,7 +2511,7 @@
   std::pair<iterator, bool> insert(
       T&& value ABSL_INTERNAL_ATTRIBUTE_CAPTURED_BY(this))
       ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return emplace(std::forward<T>(value));
+    return this->template insert<T, 0>(std::forward<T>(value));
   }
 
   // This overload kicks in when the argument is a bitfield or an lvalue of
@@ -2622,22 +2525,22 @@
   //   const char* p = "hello";
   //   s.insert(p);
   //
-  template <class T, std::enable_if_t<
+  template <class T, int = std::enable_if_t<
                          IsDecomposableAndInsertable<const T&>::value &&
                              !IsLifetimeBoundAssignmentFrom<const T&>::value,
-                         int> = 0>
+                         int>()>
   std::pair<iterator, bool> insert(const T& value)
       ABSL_ATTRIBUTE_LIFETIME_BOUND {
     return emplace(value);
   }
-  template <class T,
+  template <class T, int&...,
             std::enable_if_t<IsDecomposableAndInsertable<const T&>::value &&
                                  IsLifetimeBoundAssignmentFrom<const T&>::value,
                              int> = 0>
   std::pair<iterator, bool> insert(
       const T& value ABSL_INTERNAL_ATTRIBUTE_CAPTURED_BY(this))
       ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return emplace(value);
+    return this->template insert<T, 0>(value);
   }
 
   // This overload kicks in when the argument is an rvalue of init_type. Its
@@ -2664,21 +2567,22 @@
 #endif
 
   template <class T,
-            std::enable_if_t<IsDecomposableAndInsertable<T>::value &&
-                                 IsNotBitField<T>::value &&
-                                 !IsLifetimeBoundAssignmentFrom<T>::value,
-                             int> = 0>
+            int = std::enable_if_t<IsDecomposableAndInsertable<T>::value &&
+                                       IsNotBitField<T>::value &&
+                                       !IsLifetimeBoundAssignmentFrom<T>::value,
+                                   int>()>
   iterator insert(const_iterator, T&& value) ABSL_ATTRIBUTE_LIFETIME_BOUND {
     return insert(std::forward<T>(value)).first;
   }
-  template <class T,
+  template <class T, int&...,
             std::enable_if_t<IsDecomposableAndInsertable<T>::value &&
                                  IsNotBitField<T>::value &&
                                  IsLifetimeBoundAssignmentFrom<T>::value,
                              int> = 0>
-  iterator insert(const_iterator, T&& value ABSL_INTERNAL_ATTRIBUTE_CAPTURED_BY(
-                                      this)) ABSL_ATTRIBUTE_LIFETIME_BOUND {
-    return insert(std::forward<T>(value)).first;
+  iterator insert(const_iterator hint,
+                  T&& value ABSL_INTERNAL_ATTRIBUTE_CAPTURED_BY(this))
+      ABSL_ATTRIBUTE_LIFETIME_BOUND {
+    return this->template insert<T, 0>(hint, std::forward<T>(value));
   }
 
   template <class T, std::enable_if_t<
@@ -2955,7 +2859,7 @@
               typename AllocTraits::propagate_on_container_swap{});
   }
 
-  void rehash(size_t n) { Rehash(common(), n, GetPolicyFunctions()); }
+  void rehash(size_t n) { Rehash(common(), GetPolicyFunctions(), n); }
 
   void reserve(size_t n) {
     const size_t cap = capacity();
@@ -2963,11 +2867,11 @@
                           // !SooEnabled() implies empty(), so we can skip the
                           // check for optimization.
                           (SooEnabled() && !empty()))) {
-      ReserveAllocatedTable(common(), n, GetPolicyFunctions());
+      ReserveAllocatedTable(common(), GetPolicyFunctions(), n);
     } else {
       if (ABSL_PREDICT_TRUE(n > DefaultCapacity())) {
-        ReserveEmptyNonAllocatedTableToFitNewSize(common(), n,
-                                                  GetPolicyFunctions());
+        ReserveEmptyNonAllocatedTableToFitNewSize(common(),
+                                                  GetPolicyFunctions(), n);
       }
     }
   }
@@ -3206,10 +3110,11 @@
   iterator find_non_soo(const key_arg<K>& key, size_t hash) {
     ABSL_SWISSTABLE_ASSERT(!is_soo());
     auto seq = probe(common(), hash);
+    const h2_t h2 = H2(hash);
     const ctrl_t* ctrl = control();
     while (true) {
       Group g{ctrl + seq.offset()};
-      for (uint32_t i : g.Match(H2(hash))) {
+      for (uint32_t i : g.Match(h2)) {
         if (ABSL_PREDICT_TRUE(PolicyTraits::apply(
                 EqualElement<K>{key, eq_ref()},
                 PolicyTraits::element(slot_array() + seq.offset(i)))))
@@ -3304,7 +3209,7 @@
                                                sizeof(slot_type))
                                          : 0,
                                      PolicyTraits::transfer_uses_memcpy()>(
-          common(), hash_of(soo_slot()), GetPolicyFunctions());
+          common(), GetPolicyFunctions(), hash_of(soo_slot()));
     }
   }
 
@@ -3361,8 +3266,8 @@
     }
     common().increment_generation();
     if (!empty() && common().should_rehash_for_bug_detection_on_move()) {
-      ResizeAllocatedTableWithSeedChange(common(), capacity(),
-                                         GetPolicyFunctions());
+      ResizeAllocatedTableWithSeedChange(common(), GetPolicyFunctions(),
+                                         capacity());
     }
   }
 
@@ -3441,12 +3346,13 @@
   std::pair<iterator, bool> find_or_prepare_insert_non_soo(const K& key) {
     ABSL_SWISSTABLE_ASSERT(!is_soo());
     prefetch_heap_block();
-    auto hash = hash_ref()(key);
+    const size_t hash = hash_ref()(key);
     auto seq = probe(common(), hash);
+    const h2_t h2 = H2(hash);
     const ctrl_t* ctrl = control();
     while (true) {
       Group g{ctrl + seq.offset()};
-      for (uint32_t i : g.Match(H2(hash))) {
+      for (uint32_t i : g.Match(h2)) {
         if (ABSL_PREDICT_TRUE(PolicyTraits::apply(
                 EqualElement<K>{key, eq_ref()},
                 PolicyTraits::element(slot_array() + seq.offset(i)))))
@@ -3456,8 +3362,8 @@
       if (ABSL_PREDICT_TRUE(mask_empty)) {
         size_t target = seq.offset(
             GetInsertionOffset(mask_empty, capacity(), hash, common().seed()));
-        return {iterator_at(PrepareInsertNonSoo(common(), hash,
-                                                GetPolicyFunctions(),
+        return {iterator_at(PrepareInsertNonSoo(common(), GetPolicyFunctions(),
+                                                hash,
                                                 FindInfo{target, seq.index()})),
                 true};
       }
@@ -3836,12 +3742,13 @@
                              const typename Set::key_type& key) {
     if (set.is_soo()) return 0;
     size_t num_probes = 0;
-    size_t hash = set.hash_ref()(key);
+    const size_t hash = set.hash_ref()(key);
     auto seq = probe(set.common(), hash);
+    const h2_t h2 = H2(hash);
     const ctrl_t* ctrl = set.control();
     while (true) {
       container_internal::Group g{ctrl + seq.offset()};
-      for (uint32_t i : g.Match(container_internal::H2(hash))) {
+      for (uint32_t i : g.Match(h2)) {
         if (Traits::apply(
                 typename Set::template EqualElement<typename Set::key_type>{
                     key, set.eq_ref()},
@@ -3876,17 +3783,18 @@
 }  // namespace hashtable_debug_internal
 
 // Extern template instantiations reduce binary size and linker input size.
+// Function definition is in raw_hash_set.cc.
 extern template void GrowFullSooTableToNextCapacity<0, false>(
-    CommonFields&, size_t, const PolicyFunctions&);
+    CommonFields&, const PolicyFunctions&, size_t);
 extern template void GrowFullSooTableToNextCapacity<1, true>(
-    CommonFields&, size_t, const PolicyFunctions&);
+    CommonFields&, const PolicyFunctions&, size_t);
 extern template void GrowFullSooTableToNextCapacity<4, true>(
-    CommonFields&, size_t, const PolicyFunctions&);
+    CommonFields&, const PolicyFunctions&, size_t);
 extern template void GrowFullSooTableToNextCapacity<8, true>(
-    CommonFields&, size_t, const PolicyFunctions&);
+    CommonFields&, const PolicyFunctions&, size_t);
 #if UINTPTR_MAX == UINT64_MAX
 extern template void GrowFullSooTableToNextCapacity<16, true>(
-    CommonFields&, size_t, const PolicyFunctions&);
+    CommonFields&, const PolicyFunctions&, size_t);
 #endif
 
 }  // namespace container_internal
diff --git a/absl/container/internal/raw_hash_set_test.cc b/absl/container/internal/raw_hash_set_test.cc
index 7a5edd6..41cc6ec 100644
--- a/absl/container/internal/raw_hash_set_test.cc
+++ b/absl/container/internal/raw_hash_set_test.cc
@@ -915,6 +915,8 @@
 using NonSooIntTableSlotType = SizedValue<kNonSooSize>;
 static_assert(sizeof(NonSooIntTableSlotType) >= kNonSooSize, "too small");
 using NonSooIntTable = ValueTable<NonSooIntTableSlotType>;
+using SooInt32Table =
+    ValueTable<int32_t, /*kTransferable=*/true, /*kSoo=*/true>;
 using SooIntTable = ValueTable<int64_t, /*kTransferable=*/true, /*kSoo=*/true>;
 using NonMemcpyableSooIntTable =
     ValueTable<int64_t, /*kTransferable=*/false, /*kSoo=*/true>;
@@ -1581,7 +1583,10 @@
 
 TYPED_TEST(SooTest, LargeTable) {
   TypeParam t;
-  for (int64_t i = 0; i != 10000; ++i) t.emplace(i << 40);
+  for (int64_t i = 0; i != 10000; ++i) {
+    t.emplace(i << 40);
+    ASSERT_EQ(t.size(), i + 1);
+  }
   for (int64_t i = 0; i != 10000; ++i)
     ASSERT_EQ(i << 40, static_cast<int64_t>(*t.find(i << 40)));
 }
@@ -2913,14 +2918,20 @@
 template <typename T>
 class RawHashSamplerTest : public testing::Test {};
 
-using RawHashSamplerTestTypes = ::testing::Types<SooIntTable, NonSooIntTable>;
+using RawHashSamplerTestTypes = ::testing::Types<
+    // 32-bit value type to ensure the table is SOO on 32-bit platforms too.
+    // A 64-bit table is not SOO there due to alignment.
+    SooInt32Table,
+    NonSooIntTable>;
 TYPED_TEST_SUITE(RawHashSamplerTest, RawHashSamplerTestTypes);
 
 TYPED_TEST(RawHashSamplerTest, Sample) {
-  constexpr bool soo_enabled = std::is_same<SooIntTable, TypeParam>::value;
+  constexpr bool soo_enabled = std::is_same<SooInt32Table, TypeParam>::value;
   // Enable the feature even if the prod default is off.
   SetSamplingRateTo1Percent();
 
+  ASSERT_EQ(TypeParam().capacity(), soo_enabled ? SooCapacity() : 0);
+
   auto& sampler = GlobalHashtablezSampler();
   size_t start_size = 0;
 
@@ -2992,7 +3003,7 @@
 }
 
 std::vector<const HashtablezInfo*> SampleSooMutation(
-    absl::FunctionRef<void(SooIntTable&)> mutate_table) {
+    absl::FunctionRef<void(SooInt32Table&)> mutate_table) {
   // Enable the feature even if the prod default is off.
   SetSamplingRateTo1Percent();
 
@@ -3005,7 +3016,7 @@
     ++start_size;
   });
 
-  std::vector<SooIntTable> tables;
+  std::vector<SooInt32Table> tables;
   for (int i = 0; i < 1000000; ++i) {
     tables.emplace_back();
     mutate_table(tables.back());
@@ -3025,16 +3036,16 @@
 }
 
 TEST(RawHashSamplerTest, SooTableInsertToEmpty) {
-  if (SooIntTable().capacity() != SooCapacity()) {
+  if (SooInt32Table().capacity() != SooCapacity()) {
     CHECK_LT(sizeof(void*), 8) << "missing SOO coverage";
     GTEST_SKIP() << "not SOO on this platform";
   }
   std::vector<const HashtablezInfo*> infos =
-      SampleSooMutation([](SooIntTable& t) { t.insert(1); });
+      SampleSooMutation([](SooInt32Table& t) { t.insert(1); });
 
   for (const HashtablezInfo* info : infos) {
     ASSERT_EQ(info->inline_element_size,
-              sizeof(typename SooIntTable::value_type));
+              sizeof(typename SooInt32Table::value_type));
     ASSERT_EQ(info->soo_capacity, SooCapacity());
     ASSERT_EQ(info->capacity, NextCapacity(SooCapacity()));
     ASSERT_EQ(info->size, 1);
@@ -3046,16 +3057,16 @@
 }
 
 TEST(RawHashSamplerTest, SooTableReserveToEmpty) {
-  if (SooIntTable().capacity() != SooCapacity()) {
+  if (SooInt32Table().capacity() != SooCapacity()) {
     CHECK_LT(sizeof(void*), 8) << "missing SOO coverage";
     GTEST_SKIP() << "not SOO on this platform";
   }
   std::vector<const HashtablezInfo*> infos =
-      SampleSooMutation([](SooIntTable& t) { t.reserve(100); });
+      SampleSooMutation([](SooInt32Table& t) { t.reserve(100); });
 
   for (const HashtablezInfo* info : infos) {
     ASSERT_EQ(info->inline_element_size,
-              sizeof(typename SooIntTable::value_type));
+              sizeof(typename SooInt32Table::value_type));
     ASSERT_EQ(info->soo_capacity, SooCapacity());
     ASSERT_GE(info->capacity, 100);
     ASSERT_EQ(info->size, 0);
@@ -3069,19 +3080,19 @@
 // This tests that reserve on a full SOO table doesn't incorrectly result in new
 // (over-)sampling.
 TEST(RawHashSamplerTest, SooTableReserveToFullSoo) {
-  if (SooIntTable().capacity() != SooCapacity()) {
+  if (SooInt32Table().capacity() != SooCapacity()) {
     CHECK_LT(sizeof(void*), 8) << "missing SOO coverage";
     GTEST_SKIP() << "not SOO on this platform";
   }
   std::vector<const HashtablezInfo*> infos =
-      SampleSooMutation([](SooIntTable& t) {
+      SampleSooMutation([](SooInt32Table& t) {
         t.insert(1);
         t.reserve(100);
       });
 
   for (const HashtablezInfo* info : infos) {
     ASSERT_EQ(info->inline_element_size,
-              sizeof(typename SooIntTable::value_type));
+              sizeof(typename SooInt32Table::value_type));
     ASSERT_EQ(info->soo_capacity, SooCapacity());
     ASSERT_GE(info->capacity, 100);
     ASSERT_EQ(info->size, 1);
@@ -3095,12 +3106,12 @@
 // This tests that rehash(0) on a sampled table with size that fits in SOO
 // doesn't incorrectly result in losing sampling.
 TEST(RawHashSamplerTest, SooTableRehashShrinkWhenSizeFitsInSoo) {
-  if (SooIntTable().capacity() != SooCapacity()) {
+  if (SooInt32Table().capacity() != SooCapacity()) {
     CHECK_LT(sizeof(void*), 8) << "missing SOO coverage";
     GTEST_SKIP() << "not SOO on this platform";
   }
   std::vector<const HashtablezInfo*> infos =
-      SampleSooMutation([](SooIntTable& t) {
+      SampleSooMutation([](SooInt32Table& t) {
         t.reserve(100);
         t.insert(1);
         EXPECT_GE(t.capacity(), 100);
@@ -3109,7 +3120,7 @@
 
   for (const HashtablezInfo* info : infos) {
     ASSERT_EQ(info->inline_element_size,
-              sizeof(typename SooIntTable::value_type));
+              sizeof(typename SooInt32Table::value_type));
     ASSERT_EQ(info->soo_capacity, SooCapacity());
     ASSERT_EQ(info->capacity, NextCapacity(SooCapacity()));
     ASSERT_EQ(info->size, 1);
@@ -4049,6 +4060,18 @@
   }
 }
 
+TEST(HashtableSize, GenerateNewSeedDoesntChangeSize) {
+  size_t size = 1;
+  do {
+    HashtableSize hs(no_seed_empty_tag_t{});
+    hs.increment_size(size);
+    EXPECT_EQ(hs.size(), size);
+    hs.generate_new_seed();
+    EXPECT_EQ(hs.size(), size);
+    size = size * 2 + 1;
+  } while (size < MaxValidSizeFor1ByteSlot());
+}
+
 TEST(Table, MaxValidSize) {
   IntTable t;
   EXPECT_EQ(MaxValidSize(sizeof(IntTable::value_type)), t.max_size());
diff --git a/absl/copts/AbseilConfigureCopts.cmake b/absl/copts/AbseilConfigureCopts.cmake
index a618199..f1d8b26 100644
--- a/absl/copts/AbseilConfigureCopts.cmake
+++ b/absl/copts/AbseilConfigureCopts.cmake
@@ -71,7 +71,7 @@
 endif()
 
 
-if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
+if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU" OR CMAKE_CXX_COMPILER_ID STREQUAL "QCC")
   set(ABSL_DEFAULT_COPTS "${ABSL_GCC_FLAGS}")
   set(ABSL_TEST_COPTS "${ABSL_GCC_TEST_FLAGS}")
 elseif(CMAKE_CXX_COMPILER_ID MATCHES "Clang")  # MATCHES so we get both Clang and AppleClang
diff --git a/absl/debugging/BUILD.bazel b/absl/debugging/BUILD.bazel
index 6e61bb3..01d3230 100644
--- a/absl/debugging/BUILD.bazel
+++ b/absl/debugging/BUILD.bazel
@@ -202,6 +202,7 @@
     ],
     hdrs = [
         "internal/address_is_readable.h",
+        "internal/addresses.h",
         "internal/elf_mem_image.h",
         "internal/vdso_support.h",
     ],
diff --git a/absl/debugging/CMakeLists.txt b/absl/debugging/CMakeLists.txt
index a96b4f3..30eeb5b 100644
--- a/absl/debugging/CMakeLists.txt
+++ b/absl/debugging/CMakeLists.txt
@@ -174,6 +174,7 @@
     debugging_internal
   HDRS
     "internal/address_is_readable.h"
+    "internal/addresses.h"
     "internal/elf_mem_image.h"
     "internal/vdso_support.h"
   SRCS
diff --git a/absl/debugging/internal/addresses.h b/absl/debugging/internal/addresses.h
new file mode 100644
index 0000000..e40c54e
--- /dev/null
+++ b/absl/debugging/internal/addresses.h
@@ -0,0 +1,53 @@
+// Copyright 2025 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_DEBUGGING_INTERNAL_ADDRESSES_H_
+#define ABSL_DEBUGGING_INTERNAL_ADDRESSES_H_
+
+#include <stdint.h>
+
+#include "absl/base/config.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace debugging_internal {
+
+// Removes any metadata (tag bits) from the given pointer, converting it into a
+// user-readable address.
+inline uintptr_t StripPointerMetadata(void* ptr) {
+#if defined(__aarch64__)
+  // When PAC-RET (-mbranch-protection=pac-ret) is enabled, return addresses
+  // stored on the stack will be signed, which means that pointer bits outside
+  // of the virtual address range are potentially set. Since the stacktrace code
+  // is expected to return normal code pointers, this function clears those
+  // bits.
+  register uintptr_t x30 __asm__("x30") = reinterpret_cast<uintptr_t>(ptr);
+  // The normal instruction for clearing PAC bits is XPACI, but for
+  // compatibility with ARM platforms that do not support pointer
+  // authentication, we use the hint space instruction XPACLRI instead. Hint
+  // space instructions behave as NOPs on unsupported platforms.
+#define ABSL_XPACLRI_HINT "hint #0x7;"
+  asm(ABSL_XPACLRI_HINT : "+r"(x30));  // asm("xpaclri" : "+r"(x30));
+#undef ABSL_XPACLRI_HINT
+  return x30;
+#else
+  return reinterpret_cast<uintptr_t>(ptr);
+#endif
+}
+
+}  // namespace debugging_internal
+ABSL_NAMESPACE_END
+}  // namespace absl
+
+#endif  // ABSL_DEBUGGING_INTERNAL_ADDRESSES_H_
diff --git a/absl/debugging/internal/stacktrace_aarch64-inl.inc b/absl/debugging/internal/stacktrace_aarch64-inl.inc
index dccadae..baea9a8 100644
--- a/absl/debugging/internal/stacktrace_aarch64-inl.inc
+++ b/absl/debugging/internal/stacktrace_aarch64-inl.inc
@@ -18,6 +18,7 @@
 
 #include "absl/base/attributes.h"
 #include "absl/debugging/internal/address_is_readable.h"
+#include "absl/debugging/internal/addresses.h"
 #include "absl/debugging/internal/vdso_support.h"  // a no-op on non-elf or non-glibc systems
 #include "absl/debugging/stacktrace.h"
 
@@ -178,22 +179,6 @@
   return new_frame_pointer;
 }
 
-// When PAC-RET (-mbranch-protection=pac-ret) is enabled, return addresses
-// stored on the stack will be signed, which means that pointer bits outside of
-// the VA range are potentially set. Since the stacktrace code is expected to
-// return normal code pointers, this function clears those bits.
-inline void* ClearPacBits(void* ptr) {
-  register void* x30 __asm__("x30") = ptr;
-  // The normal instruction for clearing PAC bits is XPACI, but for
-  // compatibility with ARM platforms that do not support pointer
-  // authentication, we use the hint space instruction XPACLRI instead. Hint
-  // space instructions behave as NOPs on unsupported platforms.
-#define ABSL_XPACLRI_HINT "hint #0x7;"
-  asm(ABSL_XPACLRI_HINT : "+r"(x30));  // asm("xpaclri" : "+r"(x30));
-#undef ABSL_XPACLRI_HINT
-  return x30;
-}
-
 template <bool IS_STACK_FRAMES, bool IS_WITH_CONTEXT>
 // We count on the bottom frame being this one. See the comment
 // at prev_return_address
@@ -235,7 +220,8 @@
     if (skip_count > 0) {
       skip_count--;
     } else {
-      result[n] = ClearPacBits(prev_return_address);
+      result[n] = reinterpret_cast<void *>(
+          absl::debugging_internal::StripPointerMetadata(prev_return_address));
       if (IS_STACK_FRAMES) {
         sizes[n] = static_cast<int>(
             ComputeStackFrameSize(prev_frame_pointer, frame_pointer));
diff --git a/absl/functional/internal/any_invocable.h b/absl/functional/internal/any_invocable.h
index 2fab453..6bfc4d1 100644
--- a/absl/functional/internal/any_invocable.h
+++ b/absl/functional/internal/any_invocable.h
@@ -634,17 +634,12 @@
     absl::enable_if_t<Impl<Sig>::template CallIsNoexceptIfSigIsNoexcept<
         std::reference_wrapper<F>>::value>>;
 
-////////////////////////////////////////////////////////////////////////////////
-//
 // The constraint for checking whether or not a call meets the noexcept
-// callability requirements. This is a preprocessor macro because specifying it
+// callability requirements. We use a preprocessor macro because specifying it
 // this way as opposed to a disjunction/branch can improve the user-side error
 // messages and avoids an instantiation of std::is_nothrow_invocable_r in the
 // cases where the user did not specify a noexcept function type.
 //
-#define ABSL_INTERNAL_ANY_INVOCABLE_NOEXCEPT_CONSTRAINT(inv_quals, noex) \
-  ABSL_INTERNAL_ANY_INVOCABLE_NOEXCEPT_CONSTRAINT_##noex(inv_quals)
-
 // The disjunction below is because we can't rely on std::is_nothrow_invocable_r
 // to give the right result when ReturnType is non-moveable in toolchains that
 // don't treat non-moveable result types correctly. For example this was the
@@ -698,8 +693,8 @@
     /*SFINAE constraint to check if F is nothrow-invocable when necessary*/    \
     template <class F>                                                         \
     using CallIsNoexceptIfSigIsNoexcept =                                      \
-        TrueAlias<ABSL_INTERNAL_ANY_INVOCABLE_NOEXCEPT_CONSTRAINT(inv_quals,   \
-                                                                  noex)>;      \
+        TrueAlias<ABSL_INTERNAL_ANY_INVOCABLE_NOEXCEPT_CONSTRAINT_##noex(      \
+            inv_quals)>;                                                       \
                                                                                \
     /*Put the AnyInvocable into an empty state.*/                              \
     Impl() = default;                                                          \
@@ -772,7 +767,6 @@
 #undef ABSL_INTERNAL_ANY_INVOCABLE_IMPL_
 #undef ABSL_INTERNAL_ANY_INVOCABLE_NOEXCEPT_CONSTRAINT_false
 #undef ABSL_INTERNAL_ANY_INVOCABLE_NOEXCEPT_CONSTRAINT_true
-#undef ABSL_INTERNAL_ANY_INVOCABLE_NOEXCEPT_CONSTRAINT
 
 }  // namespace internal_any_invocable
 ABSL_NAMESPACE_END
diff --git a/absl/hash/internal/hash.h b/absl/hash/internal/hash.h
index 8ca7d84..4db7f0f 100644
--- a/absl/hash/internal/hash.h
+++ b/absl/hash/internal/hash.h
@@ -1099,7 +1099,7 @@
   template <typename T, absl::enable_if_t<IntegralFastPath<T>::value, int> = 0>
   static size_t hash(T value) {
     return static_cast<size_t>(
-        WeakMix(Seed() ^ static_cast<std::make_unsigned_t<T>>(value)));
+        WeakMix(Seed(), static_cast<std::make_unsigned_t<T>>(value)));
   }
 
   // Overload of MixingHashState::hash()
@@ -1152,7 +1152,7 @@
   // optimize Read1To3 and Read4To8 differently for the string case.
   static MixingHashState combine_raw(MixingHashState hash_state,
                                      uint64_t value) {
-    return MixingHashState(WeakMix(hash_state.state_ ^ value));
+    return MixingHashState(WeakMix(hash_state.state_, value));
   }
 
   // Implementation of the base case for combine_contiguous where we actually
@@ -1180,7 +1180,7 @@
       // Empty ranges have no effect.
       return state;
     }
-    return WeakMix(state ^ v);
+    return WeakMix(state, v);
   }
 
   ABSL_ATTRIBUTE_ALWAYS_INLINE static uint64_t CombineContiguousImpl9to16(
@@ -1297,7 +1297,9 @@
 
   // Slightly lower latency than Mix, but with lower quality. The byte swap
   // helps ensure that low bits still have high quality.
-  ABSL_ATTRIBUTE_ALWAYS_INLINE static uint64_t WeakMix(uint64_t n) {
+  ABSL_ATTRIBUTE_ALWAYS_INLINE static uint64_t WeakMix(uint64_t lhs,
+                                                       uint64_t rhs) {
+    const uint64_t n = lhs ^ rhs;
     // WeakMix doesn't work well on 32-bit platforms so just use Mix.
     if constexpr (sizeof(size_t) < 8) return Mix(n, kMul);
 #ifdef __ARM_ACLE
diff --git a/absl/time/time.h b/absl/time/time.h
index d73a204..db17a4c 100644
--- a/absl/time/time.h
+++ b/absl/time/time.h
@@ -620,12 +620,12 @@
 //
 //   absl::Duration d = absl::Milliseconds(1500);
 //   int64_t isec = absl::ToInt64Seconds(d);  // isec == 1
-ABSL_ATTRIBUTE_CONST_FUNCTION inline int64_t ToInt64Nanoseconds(Duration d);
-ABSL_ATTRIBUTE_CONST_FUNCTION inline int64_t ToInt64Microseconds(Duration d);
-ABSL_ATTRIBUTE_CONST_FUNCTION inline int64_t ToInt64Milliseconds(Duration d);
-ABSL_ATTRIBUTE_CONST_FUNCTION inline int64_t ToInt64Seconds(Duration d);
-ABSL_ATTRIBUTE_CONST_FUNCTION inline int64_t ToInt64Minutes(Duration d);
-ABSL_ATTRIBUTE_CONST_FUNCTION inline int64_t ToInt64Hours(Duration d);
+ABSL_ATTRIBUTE_CONST_FUNCTION constexpr int64_t ToInt64Nanoseconds(Duration d);
+ABSL_ATTRIBUTE_CONST_FUNCTION constexpr int64_t ToInt64Microseconds(Duration d);
+ABSL_ATTRIBUTE_CONST_FUNCTION constexpr int64_t ToInt64Milliseconds(Duration d);
+ABSL_ATTRIBUTE_CONST_FUNCTION constexpr int64_t ToInt64Seconds(Duration d);
+ABSL_ATTRIBUTE_CONST_FUNCTION constexpr int64_t ToInt64Minutes(Duration d);
+ABSL_ATTRIBUTE_CONST_FUNCTION constexpr int64_t ToInt64Hours(Duration d);
 
 // ToDoubleNanoseconds()
 // ToDoubleMicroseconds()
@@ -1864,7 +1864,7 @@
   return time_internal::FromUnixDuration(Seconds(t));
 }
 
-ABSL_ATTRIBUTE_CONST_FUNCTION inline int64_t ToInt64Nanoseconds(Duration d) {
+ABSL_ATTRIBUTE_CONST_FUNCTION constexpr int64_t ToInt64Nanoseconds(Duration d) {
   if (time_internal::GetRepHi(d) >= 0 &&
       time_internal::GetRepHi(d) >> 33 == 0) {
     return (time_internal::GetRepHi(d) * 1000 * 1000 * 1000) +
@@ -1873,7 +1873,8 @@
   return d / Nanoseconds(1);
 }
 
-ABSL_ATTRIBUTE_CONST_FUNCTION inline int64_t ToInt64Microseconds(Duration d) {
+ABSL_ATTRIBUTE_CONST_FUNCTION constexpr int64_t ToInt64Microseconds(
+    Duration d) {
   if (time_internal::GetRepHi(d) >= 0 &&
       time_internal::GetRepHi(d) >> 43 == 0) {
     return (time_internal::GetRepHi(d) * 1000 * 1000) +
@@ -1883,7 +1884,8 @@
   return d / Microseconds(1);
 }
 
-ABSL_ATTRIBUTE_CONST_FUNCTION inline int64_t ToInt64Milliseconds(Duration d) {
+ABSL_ATTRIBUTE_CONST_FUNCTION constexpr int64_t ToInt64Milliseconds(
+    Duration d) {
   if (time_internal::GetRepHi(d) >= 0 &&
       time_internal::GetRepHi(d) >> 53 == 0) {
     return (time_internal::GetRepHi(d) * 1000) +
@@ -1893,21 +1895,21 @@
   return d / Milliseconds(1);
 }
 
-ABSL_ATTRIBUTE_CONST_FUNCTION inline int64_t ToInt64Seconds(Duration d) {
+ABSL_ATTRIBUTE_CONST_FUNCTION constexpr int64_t ToInt64Seconds(Duration d) {
   int64_t hi = time_internal::GetRepHi(d);
   if (time_internal::IsInfiniteDuration(d)) return hi;
   if (hi < 0 && time_internal::GetRepLo(d) != 0) ++hi;
   return hi;
 }
 
-ABSL_ATTRIBUTE_CONST_FUNCTION inline int64_t ToInt64Minutes(Duration d) {
+ABSL_ATTRIBUTE_CONST_FUNCTION constexpr int64_t ToInt64Minutes(Duration d) {
   int64_t hi = time_internal::GetRepHi(d);
   if (time_internal::IsInfiniteDuration(d)) return hi;
   if (hi < 0 && time_internal::GetRepLo(d) != 0) ++hi;
   return hi / 60;
 }
 
-ABSL_ATTRIBUTE_CONST_FUNCTION inline int64_t ToInt64Hours(Duration d) {
+ABSL_ATTRIBUTE_CONST_FUNCTION constexpr int64_t ToInt64Hours(Duration d) {
   int64_t hi = time_internal::GetRepHi(d);
   if (time_internal::IsInfiniteDuration(d)) return hi;
   if (hi < 0 && time_internal::GetRepLo(d) != 0) ++hi;
diff --git a/ci/linux_clang-latest_libcxx_asan_bazel.sh b/ci/linux_clang-latest_libcxx_asan_bazel.sh
index a47fd1b..c83f3a0 100755
--- a/ci/linux_clang-latest_libcxx_asan_bazel.sh
+++ b/ci/linux_clang-latest_libcxx_asan_bazel.sh
@@ -25,7 +25,7 @@
 fi
 
 if [[ -z ${STD:-} ]]; then
-  STD="c++17 c++20"
+  STD="c++17 c++20 c++23"
 fi
 
 if [[ -z ${COMPILATION_MODE:-} ]]; then
diff --git a/ci/linux_clang-latest_libcxx_bazel.sh b/ci/linux_clang-latest_libcxx_bazel.sh
index d088277..832a9d8 100755
--- a/ci/linux_clang-latest_libcxx_bazel.sh
+++ b/ci/linux_clang-latest_libcxx_bazel.sh
@@ -25,7 +25,7 @@
 fi
 
 if [[ -z ${STD:-} ]]; then
-  STD="c++17 c++20"
+  STD="c++17 c++20 c++23"
 fi
 
 if [[ -z ${COMPILATION_MODE:-} ]]; then
diff --git a/ci/linux_clang-latest_libcxx_tsan_bazel.sh b/ci/linux_clang-latest_libcxx_tsan_bazel.sh
index 5fa8453..82b4dd1 100755
--- a/ci/linux_clang-latest_libcxx_tsan_bazel.sh
+++ b/ci/linux_clang-latest_libcxx_tsan_bazel.sh
@@ -25,7 +25,7 @@
 fi
 
 if [[ -z ${STD:-} ]]; then
-  STD="c++17 c++20"
+  STD="c++17 c++20 c++23"
 fi
 
 if [[ -z ${COMPILATION_MODE:-} ]]; then
diff --git a/ci/linux_clang-latest_libstdcxx_bazel.sh b/ci/linux_clang-latest_libstdcxx_bazel.sh
index 0d5501b..06aef62 100755
--- a/ci/linux_clang-latest_libstdcxx_bazel.sh
+++ b/ci/linux_clang-latest_libstdcxx_bazel.sh
@@ -25,7 +25,7 @@
 fi
 
 if [[ -z ${STD:-} ]]; then
-  STD="c++17 c++20"
+  STD="c++17 c++20 c++23"
 fi
 
 if [[ -z ${COMPILATION_MODE:-} ]]; then
diff --git a/ci/linux_gcc-latest_libstdcxx_bazel.sh b/ci/linux_gcc-latest_libstdcxx_bazel.sh
index 849766b..2daa132 100755
--- a/ci/linux_gcc-latest_libstdcxx_bazel.sh
+++ b/ci/linux_gcc-latest_libstdcxx_bazel.sh
@@ -25,7 +25,7 @@
 fi
 
 if [[ -z ${STD:-} ]]; then
-  STD="c++17 c++20"
+  STD="c++17 c++20 c++23"
 fi
 
 if [[ -z ${COMPILATION_MODE:-} ]]; then