Thanks for the link. Based on it I removed some of the nullptr usages,
keeping only assignments.
François
On 26/06/2024 23:41, Jonathan Wakely wrote:
On Wed, 26 Jun 2024 at 21:39, François Dumont <frs.dum...@gmail.com> wrote:
Hi
Here is my proposal to add support for fancy allocator pointers.
The only place where we still have raw C pointers is at the
iterator::pointer level, but this is consistent with the std::list
implementation and is also logical, considering that we do not get
value_type pointers from the allocator.
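
As a rough sketch of what such an allocator looks like (the FancyPtr and
FancyAlloc names below are purely illustrative and not part of the patch;
the testsuite uses __gnu_test::CustomPointerAlloc for this purpose):

#include <cstddef>
#include <memory>

// Illustrative fancy pointer: a class type wrapping a raw pointer.
// std::pointer_traits' defaults provide element_type, difference_type and
// rebind for it, so a container can form e.g. FancyPtr<node-type>.
template<typename T>
struct FancyPtr
{
  T* raw = nullptr;

  FancyPtr() = default;
  FancyPtr(std::nullptr_t) noexcept { }          // null state, like a raw pointer
  explicit FancyPtr(T* p) noexcept : raw(p) { }

  T& operator*() const noexcept { return *raw; }
  T* operator->() const noexcept { return raw; }
  explicit operator bool() const noexcept { return raw != nullptr; }

  static FancyPtr pointer_to(T& r) noexcept      // used by std::pointer_traits
  { return FancyPtr(std::addressof(r)); }

  friend bool operator==(FancyPtr l, FancyPtr r) noexcept { return l.raw == r.raw; }
  friend bool operator!=(FancyPtr l, FancyPtr r) noexcept { return l.raw != r.raw; }
};

// Minimal allocator whose allocator_traits<...>::pointer is FancyPtr<T>.
template<typename T>
struct FancyAlloc
{
  using value_type = T;
  using pointer = FancyPtr<T>;

  FancyAlloc() = default;
  template<typename U> FancyAlloc(const FancyAlloc<U>&) noexcept { }

  pointer allocate(std::size_t n)
  { return pointer(std::allocator<T>().allocate(n)); }

  void deallocate(pointer p, std::size_t n) noexcept
  { std::allocator<T>().deallocate(std::addressof(*p), n); }

  friend bool operator==(const FancyAlloc&, const FancyAlloc&) noexcept { return true; }
  friend bool operator!=(const FancyAlloc&, const FancyAlloc&) noexcept { return false; }
};

With such an allocator the container has to store FancyPtr-based node and
bucket pointers internally, which is what the patch makes _Hashtable do;
only iterator::pointer stays a raw value_type*.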
I also wondered whether it was OK to use nullptr in various places or
whether I should rather use __node_ptr{}. But recent modifications
already use nullptr, so I think it's fine.
I haven't reviewed the patch yet, but this answers the nullptr question:
https://en.cppreference.com/w/cpp/named_req/NullablePointer
(aka Cpp17NullablePointer in the C++ standard).
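
To make that concrete, here is a tiny sketch of the operations the named
requirement guarantees (P stands for any conforming pointer-like type,
e.g. the patch's __node_ptr; this is just an illustration, not code from
the patch):

template<typename P>
void nullable_pointer_operations(P p)
{
  P a{};             // value-initialization yields the null value
  P b = nullptr;     // constructible from nullptr
  a = nullptr;       // assignable from nullptr
  if (p != nullptr)  // equality-comparable with nullptr
    p = b;           // copy-assignable
}

So assigning nullptr and value-initializing with __node_ptr{} both produce
the null value; the remaining nullptr uses are the assignments mentioned
above.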
diff --git a/libstdc++-v3/include/bits/hashtable.h b/libstdc++-v3/include/bits/hashtable.h
index 361da2b3b4d..845792f7ba9 100644
--- a/libstdc++-v3/include/bits/hashtable.h
+++ b/libstdc++-v3/include/bits/hashtable.h
@@ -200,8 +200,8 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
_RehashPolicy, _Traits>,
private __detail::_Hashtable_alloc<
__alloc_rebind<_Alloc,
- __detail::_Hash_node<_Value,
- _Traits::__hash_cached::value>>>,
+ __detail::__get_node_type<_Alloc, _Value,
+ _Traits::__hash_cached::value>>>,
private _Hashtable_enable_default_ctor<_Equal, _Hash, _Alloc>
{
static_assert(is_same<typename remove_cv<_Value>::type, _Value>::value,
@@ -216,21 +216,23 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
using __traits_type = _Traits;
using __hash_cached = typename __traits_type::__hash_cached;
using __constant_iterators = typename __traits_type::__constant_iterators;
- using __node_type = __detail::_Hash_node<_Value, __hash_cached::value>;
+ using __node_type = __detail::__get_node_type<_Alloc, _Value,
+ _Traits::__hash_cached::value>;
using __node_alloc_type = __alloc_rebind<_Alloc, __node_type>;
-
using __hashtable_alloc = __detail::_Hashtable_alloc<__node_alloc_type>;
using __node_value_type =
__detail::_Hash_node_value<_Value, __hash_cached::value>;
using __node_ptr = typename __hashtable_alloc::__node_ptr;
- using __value_alloc_traits =
- typename __hashtable_alloc::__value_alloc_traits;
using __node_alloc_traits =
typename __hashtable_alloc::__node_alloc_traits;
+ using __value_alloc_traits =
+ typename __node_alloc_traits::template rebind_traits<_Value>;
using __node_base = typename __hashtable_alloc::__node_base;
using __node_base_ptr = typename __hashtable_alloc::__node_base_ptr;
+ using __node_base_ptr_traits = std::pointer_traits<__node_base_ptr>;
using __buckets_ptr = typename __hashtable_alloc::__buckets_ptr;
+ using __buckets_ptr_traits = std::pointer_traits<__buckets_ptr>;
using __insert_base = __detail::_Insert<_Key, _Value, _Alloc, _ExtractKey,
_Equal, _Hash,
@@ -258,15 +260,15 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
using const_iterator = typename __insert_base::const_iterator;
- using local_iterator = __detail::_Local_iterator<key_type, _Value,
- _ExtractKey, _Hash, _RangeHash, _Unused,
- __constant_iterators::value,
- __hash_cached::value>;
+ using local_iterator = __detail::__local_iterator<
+ __node_ptr, key_type, value_type,
+ _ExtractKey, _Hash, _RangeHash, _Unused,
+ __constant_iterators::value, __hash_cached::value>;
- using const_local_iterator = __detail::_Local_const_iterator<
- key_type, _Value,
- _ExtractKey, _Hash, _RangeHash, _Unused,
- __constant_iterators::value, __hash_cached::value>;
+ using const_local_iterator = __detail::__const_local_iterator<
+ __node_ptr, key_type, value_type,
+ _ExtractKey, _Hash, _RangeHash, _Unused,
+ __constant_iterators::value, __hash_cached::value>;
private:
using __rehash_type = _RehashPolicy;
@@ -392,7 +394,8 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
#endif
private:
- __buckets_ptr _M_buckets = &_M_single_bucket;
+ __buckets_ptr _M_buckets =
+ __buckets_ptr_traits::pointer_to(_M_single_bucket);
size_type _M_bucket_count = 1;
__node_base _M_before_begin;
size_type _M_element_count = 0;
@@ -406,11 +409,19 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
// numerous checks in the code to avoid 0 modulus.
__node_base_ptr _M_single_bucket = nullptr;
+ __buckets_ptr
+ _M_single_bucket_ptr()
+ { return __buckets_ptr_traits::pointer_to(_M_single_bucket); }
+
+ __node_base_ptr
+ _M_before_begin_ptr()
+ { return __node_base_ptr_traits::pointer_to(_M_before_begin); }
+
void
_M_update_bbegin()
{
if (auto __begin = _M_begin())
- _M_buckets[_M_bucket_index(*__begin)] = &_M_before_begin;
+ _M_buckets[_M_bucket_index(*__begin)] = _M_before_begin_ptr();
}
void
@@ -422,7 +433,11 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
bool
_M_uses_single_bucket(__buckets_ptr __bkts) const
- { return __builtin_expect(__bkts == &_M_single_bucket, false); }
+ {
+ return __builtin_expect
+ (std::__to_address(__bkts) == std::addressof(_M_single_bucket),
+ false);
+ }
bool
_M_uses_single_bucket() const
@@ -444,7 +459,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
if (__builtin_expect(__bkt_count == 1, false))
{
_M_single_bucket = nullptr;
- return &_M_single_bucket;
+ return _M_single_bucket_ptr();
}
return __hashtable_alloc::_M_allocate_buckets(__bkt_count);
@@ -463,18 +478,26 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
_M_deallocate_buckets()
{ _M_deallocate_buckets(_M_buckets, _M_bucket_count); }
+ static __node_ptr
+ _S_cast(__node_ptr __n)
+ { return __n; }
+
+ static __node_ptr
+ _S_cast(__detail::_Hash_node_base* __n)
+ { return static_cast<__node_ptr>(__n); }
+
// Gets bucket begin, deals with the fact that non-empty buckets contain
// their before begin node.
__node_ptr
_M_bucket_begin(size_type __bkt) const
{
__node_base_ptr __n = _M_buckets[__bkt];
- return __n ? static_cast<__node_ptr>(__n->_M_nxt) : nullptr;
+ return __n ? _S_cast(__n->_M_nxt) : __node_ptr{};
}
__node_ptr
_M_begin() const
- { return static_cast<__node_ptr>(_M_before_begin._M_nxt); }
+ { return _S_cast(_M_before_begin._M_nxt); }
// Assign *this using another _Hashtable instance. Whether elements
// are copied or moved depends on the _Ht reference.
@@ -824,8 +847,8 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
{
__node_base_ptr __before_n = _M_find_before_node(__bkt, __key, __c);
if (__before_n)
- return static_cast<__node_ptr>(__before_n->_M_nxt);
- return nullptr;
+ return _S_cast(__before_n->_M_nxt);
+ return __node_ptr{};
}
template<typename _Kt>
@@ -835,8 +858,8 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
{
auto __before_n = _M_find_before_node_tr(__bkt, __key, __c);
if (__before_n)
- return static_cast<__node_ptr>(__before_n->_M_nxt);
- return nullptr;
+ return _S_cast(__before_n->_M_nxt);
+ return __node_ptr{};
}
// Insert a node at the beginning of a bucket.
@@ -863,7 +886,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
// _M_before_begin.
_M_buckets[_M_bucket_index(*__node->_M_next())] = __node;
- _M_buckets[__bkt] = &_M_before_begin;
+ _M_buckets[__bkt] = _M_before_begin_ptr();
}
}
@@ -1051,7 +1074,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
{
__glibcxx_assert(get_allocator() == __nh.get_allocator());
- __node_ptr __n = nullptr;
+ __node_ptr __n{};
const key_type& __k = __nh._M_key();
const size_type __size = size();
if (__size <= __small_size_threshold())
@@ -1109,7 +1132,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
node_type
_M_extract_node(size_t __bkt, __node_base_ptr __prev_n)
{
- __node_ptr __n = static_cast<__node_ptr>(__prev_n->_M_nxt);
+ __node_ptr __n = _S_cast(__prev_n->_M_nxt);
if (__prev_n == _M_buckets[__bkt])
_M_remove_bucket_begin(__bkt, __n->_M_next(),
__n->_M_nxt ? _M_bucket_index(*__n->_M_next()) : 0);
@@ -1157,7 +1180,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
node_type __nh;
__hash_code __code = this->_M_hash_code(__k);
std::size_t __bkt = _M_bucket_index(__code);
- if (__node_base_ptr __prev_node = _M_find_before_node(__bkt, __k, __code))
+ if (auto __prev_node = _M_find_before_node(__bkt, __k, __code))
__nh = _M_extract_node(__bkt, __prev_node);
return __nh;
}
@@ -1199,7 +1222,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
= _M_src_hash_code(__src.hash_function(), __k, *__pos._M_cur);
size_type __bkt = _M_bucket_index(__code);
if (__size <= __small_size_threshold()
- || _M_find_node(__bkt, __k, __code) == nullptr)
+ || !_M_find_node(__bkt, __k, __code))
{
auto __nh = __src.extract(__pos);
_M_insert_unique_node(__bkt, __code, __nh._M_ptr, __n_elt);
@@ -1220,7 +1243,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
node_type>, "Node types are compatible");
__glibcxx_assert(get_allocator() == __src.get_allocator());
- __node_ptr __hint = nullptr;
+ __node_ptr __hint{};
this->reserve(size() + __src.size());
for (auto __i = __src.cbegin(), __end = __src.cend(); __i != __end;)
{
@@ -1368,7 +1391,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
_Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
_M_assign_elements(_Ht&& __ht)
{
- __buckets_ptr __former_buckets = nullptr;
+ __buckets_ptr __former_buckets{};
std::size_t __former_bucket_count = _M_bucket_count;
__rehash_guard_t __rehash_guard(_M_rehash_policy);
@@ -1417,7 +1440,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
_Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
_M_assign(_Ht&& __ht, const _NodeGenerator& __node_gen)
{
- __buckets_ptr __buckets = nullptr;
+ __buckets_ptr __buckets{};
if (!_M_buckets)
_M_buckets = __buckets = _M_allocate_buckets(_M_bucket_count);
@@ -1468,7 +1491,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
_M_rehash_policy._M_reset();
_M_bucket_count = 1;
_M_single_bucket = nullptr;
- _M_buckets = &_M_single_bucket;
+ _M_buckets = _M_single_bucket_ptr();
_M_before_begin._M_nxt = nullptr;
_M_element_count = 0;
}
@@ -1493,7 +1516,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
_M_buckets = __ht._M_buckets;
else
{
- _M_buckets = &_M_single_bucket;
+ _M_buckets = _M_single_bucket_ptr();
_M_single_bucket = __ht._M_single_bucket;
}
@@ -1571,7 +1594,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
// Update buckets if __ht is using its single bucket.
if (__ht._M_uses_single_bucket())
{
- _M_buckets = &_M_single_bucket;
+ _M_buckets = _M_single_bucket_ptr();
_M_single_bucket = __ht._M_single_bucket;
}
@@ -1624,7 +1647,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
{
if (__ht._M_uses_single_bucket())
{
- _M_buckets = &_M_single_bucket;
+ _M_buckets = _M_single_bucket_ptr();
_M_single_bucket = __ht._M_single_bucket;
}
else
@@ -1694,13 +1717,13 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
if (!__x._M_uses_single_bucket())
{
_M_buckets = __x._M_buckets;
- __x._M_buckets = &__x._M_single_bucket;
+ __x._M_buckets = __x._M_single_bucket_ptr();
}
}
else if (__x._M_uses_single_bucket())
{
__x._M_buckets = _M_buckets;
- _M_buckets = &_M_single_bucket;
+ _M_buckets = _M_single_bucket_ptr();
}
else
std::swap(_M_buckets, __x._M_buckets);
@@ -1947,7 +1970,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
{
if (size() <= __small_size_threshold())
{
- __node_ptr __n, __beg = nullptr;
+ __node_ptr __n, __beg{};
for (__n = _M_begin(); __n; __n = __n->_M_next())
{
if (this->_M_key_equals_tr(__k, *__n))
@@ -1991,7 +2014,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
{
if (size() <= __small_size_threshold())
{
- __node_ptr __n, __beg = nullptr;
+ __node_ptr __n, __beg{};
for (__n = _M_begin(); __n; __n = __n->_M_next())
{
if (this->_M_key_equals_tr(__k, *__n))
@@ -2035,12 +2058,11 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
_M_find_before_node(const key_type& __k)
-> __node_base_ptr
{
- __node_base_ptr __prev_p = &_M_before_begin;
+ __node_base_ptr __prev_p = _M_before_begin_ptr();
if (!__prev_p->_M_nxt)
- return nullptr;
+ return {};
- for (__node_ptr __p = static_cast<__node_ptr>(__prev_p->_M_nxt);
- __p != nullptr;
+ for (__node_ptr __p = _S_cast(__prev_p->_M_nxt); __p;
__p = __p->_M_next())
{
if (this->_M_key_equals(__k, *__p))
@@ -2049,7 +2071,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
__prev_p = __p;
}
- return nullptr;
+ return {};
}
// Find the node before the one whose key compares equal to k in the bucket
@@ -2067,10 +2089,9 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
{
__node_base_ptr __prev_p = _M_buckets[__bkt];
if (!__prev_p)
- return nullptr;
+ return {};
- for (__node_ptr __p = static_cast<__node_ptr>(__prev_p->_M_nxt);;
- __p = __p->_M_next())
+ for (__node_ptr __p = _S_cast(__prev_p->_M_nxt);; __p = __p->_M_next())
{
if (this->_M_equals(__k, __code, *__p))
return __prev_p;
@@ -2080,7 +2101,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
__prev_p = __p;
}
- return nullptr;
+ return {};
}
template<typename _Key, typename _Value, typename _Alloc,
@@ -2097,10 +2118,9 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
{
__node_base_ptr __prev_p = _M_buckets[__bkt];
if (!__prev_p)
- return nullptr;
+ return {};
- for (__node_ptr __p = static_cast<__node_ptr>(__prev_p->_M_nxt);;
- __p = __p->_M_next())
+ for (__node_ptr __p = _S_cast(__prev_p->_M_nxt);; __p = __p->_M_next())
{
if (this->_M_equals_tr(__k, __code, *__p))
return __prev_p;
@@ -2110,7 +2130,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
__prev_p = __p;
}
- return nullptr;
+ return {};
}
template<typename _Key, typename _Value, typename _Alloc,
@@ -2273,10 +2293,11 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
// Find the node before an equivalent one or use hint if it exists and
// if it is equivalent.
+ __node_base_ptr __hint_base = __hint;
__node_base_ptr __prev
- = __builtin_expect(__hint != nullptr, false)
+ = __builtin_expect(__hint ? 1 : 0, 0)
&& this->_M_equals(__k, __code, *__hint)
- ? __hint
+ ? __hint_base
: _M_find_before_node(__bkt, __k, __code);
if (__prev)
@@ -2437,7 +2458,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
return 0;
// We found a matching node, erase it.
- __n = static_cast<__node_ptr>(__prev_n->_M_nxt);
+ __n = _S_cast(__prev_n->_M_nxt);
__bkt = _M_bucket_index(*__n);
}
else
@@ -2451,7 +2472,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
return 0;
// We found a matching node, erase it.
- __n = static_cast<__node_ptr>(__prev_n->_M_nxt);
+ __n = _S_cast(__prev_n->_M_nxt);
}
_M_erase(__bkt, __prev_n, __n);
@@ -2478,7 +2499,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
return 0;
// We found a matching node, erase it.
- __n = static_cast<__node_ptr>(__prev_n->_M_nxt);
+ __n = _S_cast(__prev_n->_M_nxt);
__bkt = _M_bucket_index(*__n);
}
else
@@ -2491,7 +2512,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
if (!__prev_n)
return 0;
- __n = static_cast<__node_ptr>(__prev_n->_M_nxt);
+ __n = _S_cast(__prev_n->_M_nxt);
}
// _GLIBCXX_RESOLVE_LIB_DEFECTS
@@ -2633,7 +2654,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
{
__p->_M_nxt = _M_before_begin._M_nxt;
_M_before_begin._M_nxt = __p;
- __new_buckets[__bkt] = &_M_before_begin;
+ __new_buckets[__bkt] = _M_before_begin_ptr();
if (__p->_M_nxt)
__new_buckets[__bbegin_bkt] = __p;
__bbegin_bkt = __bkt;
@@ -2668,7 +2689,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
_M_before_begin._M_nxt = nullptr;
std::size_t __bbegin_bkt = 0;
std::size_t __prev_bkt = 0;
- __node_ptr __prev_p = nullptr;
+ __node_ptr __prev_p{};
bool __check_bucket = false;
while (__p)
@@ -2713,7 +2734,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
{
__p->_M_nxt = _M_before_begin._M_nxt;
_M_before_begin._M_nxt = __p;
- __new_buckets[__bkt] = &_M_before_begin;
+ __new_buckets[__bkt] = _M_before_begin_ptr();
if (__p->_M_nxt)
__new_buckets[__bbegin_bkt] = __p;
__bbegin_bkt = __bkt;
diff --git a/libstdc++-v3/include/bits/hashtable_policy.h b/libstdc++-v3/include/bits/hashtable_policy.h
index 26def24f24e..225381ccb72 100644
--- a/libstdc++-v3/include/bits/hashtable_policy.h
+++ b/libstdc++-v3/include/bits/hashtable_policy.h
@@ -33,8 +33,9 @@
#include <tuple> // for std::tuple, std::forward_as_tuple
#include <bits/functional_hash.h> // for __is_fast_hash
-#include <bits/stl_algobase.h> // for std::min, std::is_permutation.
+#include <bits/stl_algobase.h> // for std::min, std::is_permutation
#include <bits/stl_pair.h> // for std::pair
+#include <bits/stl_uninitialized.h> // for __uninitialized_default_n
#include <ext/aligned_buffer.h> // for __gnu_cxx::__aligned_buffer
#include <ext/alloc_traits.h> // for std::__alloc_rebind
#include <ext/numeric_traits.h> // for __gnu_cxx::__int_traits
@@ -82,6 +83,11 @@ namespace __detail
{ return __distance_fw(__first, __last,
std::__iterator_category(__first)); }
+ template<typename _Alloc, typename _Value>
+ using __alloc_val_ptr =
+ typename std::allocator_traits<__alloc_rebind<_Alloc,
+ _Value>>::pointer;
+
struct _Identity
{
template<typename _Tp>
@@ -321,6 +327,28 @@ namespace __detail
_Hash_node_base(_Hash_node_base* __next) noexcept : _M_nxt(__next) { }
};
+ /**
+ * struct _Hash_pnode_base
+ *
+ * Like _Hash_node_base, but used when the allocator defines a custom
+ * pointer type.
+ */
+ template<typename _NodePtr>
+ struct _Hash_pnode_base
+ {
+ using __node_ptr = _NodePtr;
+
+ __node_ptr _M_nxt;
+
+ _Hash_pnode_base()
+ noexcept(std::is_nothrow_default_constructible<__node_ptr>::value)
+ : _M_nxt() { }
+
+ _Hash_pnode_base(__node_ptr __next)
+ noexcept(std::is_nothrow_copy_constructible<__node_ptr>::value)
+ : _M_nxt(__next) { }
+ };
+
/**
* struct _Hash_node_value_base
*
@@ -356,18 +384,23 @@ namespace __detail
/**
* Primary template struct _Hash_node_code_cache.
+ *
+ * No cache.
*/
template<bool _Cache_hash_code>
struct _Hash_node_code_cache
{ };
/**
- * Specialization for node with cache, struct _Hash_node_code_cache.
+ * Specialization for node with cache.
*/
template<>
struct _Hash_node_code_cache<true>
{ std::size_t _M_hash_code; };
+ /**
+ * Node with value and optionally a cache for the hash code.
+ */
template<typename _Value, bool _Cache_hash_code>
struct _Hash_node_value
: _Hash_node_value_base<_Value>
@@ -375,28 +408,64 @@ namespace __detail
{ };
/**
- * Primary template struct _Hash_node.
+ * struct _Hash_node.
+ *
+ * The node definition when the allocator is using raw pointers.
*/
template<typename _Value, bool _Cache_hash_code>
struct _Hash_node
: _Hash_node_base
, _Hash_node_value<_Value, _Cache_hash_code>
{
- _Hash_node*
+ using __node_ptr = _Hash_node*;
+ using __node_type = _Hash_node;
+
+ __node_ptr
+ _M_next() const noexcept
+ { return static_cast<__node_ptr>(this->_M_nxt); }
+ };
+
+ /**
+ * struct _Hash_pnode.
+ *
+ * The node definition used when the allocator defines a custom pointer type.
+ */
+ template<typename _ValuePtr, bool _Cache_hash_code>
+ struct _Hash_pnode
+ : _Hash_pnode_base<__ptr_rebind<
+ _ValuePtr, _Hash_pnode<_ValuePtr, _Cache_hash_code>>>
+ , _Hash_node_value<
+ typename std::pointer_traits<_ValuePtr>::element_type, _Cache_hash_code>
+ {
+ using __node_ptr = __ptr_rebind<
+ _ValuePtr, _Hash_pnode<_ValuePtr, _Cache_hash_code>>;
+ using __node_type =
+ typename std::pointer_traits<__node_ptr>::element_type;
+ typedef typename std::pointer_traits<__node_ptr>::difference_type
+ difference_type;
+
+ __node_ptr
_M_next() const noexcept
- { return static_cast<_Hash_node*>(this->_M_nxt); }
+ { return this->_M_nxt; }
};
+ template<typename _Alloc, typename _Value, bool __hash_cached>
+ using __get_node_type = typename std::conditional<
+ std::is_pointer<__alloc_val_ptr<_Alloc, _Value>>::value,
+ _Hash_node<_Value, __hash_cached>,
+ _Hash_pnode<__alloc_val_ptr<_Alloc, _Value>, __hash_cached>>::type;
+
/// Base class for node iterators.
- template<typename _Value, bool _Cache_hash_code>
- struct _Node_iterator_base
+ template<typename _NodePtr>
+ struct _Hashtable_iterator_base
{
- using __node_type = _Hash_node<_Value, _Cache_hash_code>;
+ using __node_ptr = _NodePtr;
+ using __node_type = typename std::pointer_traits<_NodePtr>::element_type;
- __node_type* _M_cur;
+ __node_ptr _M_cur;
- _Node_iterator_base() : _M_cur(nullptr) { }
- _Node_iterator_base(__node_type* __p) noexcept
+ _Hashtable_iterator_base() : _M_cur() { }
+ _Hashtable_iterator_base(__node_ptr __p) noexcept
: _M_cur(__p) { }
void
@@ -404,18 +473,33 @@ namespace __detail
{ _M_cur = _M_cur->_M_next(); }
friend bool
- operator==(const _Node_iterator_base& __x, const _Node_iterator_base& __y)
- noexcept
+ operator==(const _Hashtable_iterator_base& __x,
+ const _Hashtable_iterator_base& __y) noexcept
{ return __x._M_cur == __y._M_cur; }
#if __cpp_impl_three_way_comparison < 201907L
friend bool
- operator!=(const _Node_iterator_base& __x, const _Node_iterator_base& __y)
- noexcept
+ operator!=(const _Hashtable_iterator_base& __x,
+ const _Hashtable_iterator_base& __y) noexcept
{ return __x._M_cur != __y._M_cur; }
#endif
};
+ /// Base class for node iterators.
+ template<typename _Value, bool _Cache_hash_code>
+ struct _Node_iterator_base
+ : _Hashtable_iterator_base<_Hash_node<_Value, _Cache_hash_code>*>
+ {
+ using __base_type =
+ _Hashtable_iterator_base<_Hash_node<_Value, _Cache_hash_code>*>;
+ using __node_ptr = typename __base_type::__node_ptr;
+ using __node_type = typename __base_type::__node_type;
+
+ _Node_iterator_base() = default;
+ _Node_iterator_base(__node_ptr __p) noexcept
+ : __base_type(__p) { }
+ };
+
/// Node iterators, used to iterate through all the hashtable.
template<typename _Value, bool __constant_iterators, bool __cache>
struct _Node_iterator
@@ -424,6 +508,7 @@ namespace __detail
private:
using __base_type = _Node_iterator_base<_Value, __cache>;
using __node_type = typename __base_type::__node_type;
+ using __node_ptr = typename __base_type::__node_ptr;
public:
using value_type = _Value;
@@ -439,7 +524,7 @@ namespace __detail
_Node_iterator() = default;
explicit
- _Node_iterator(__node_type* __p) noexcept
+ _Node_iterator(__node_ptr __p) noexcept
: __base_type(__p) { }
reference
@@ -474,6 +559,7 @@ namespace __detail
private:
using __base_type = _Node_iterator_base<_Value, __cache>;
using __node_type = typename __base_type::__node_type;
+ using __node_ptr = typename __base_type::__node_ptr;
public:
typedef _Value value_type;
@@ -486,7 +572,7 @@ namespace __detail
_Node_const_iterator() = default;
explicit
- _Node_const_iterator(__node_type* __p) noexcept
+ _Node_const_iterator(__node_ptr __p) noexcept
: __base_type(__p) { }
_Node_const_iterator(const _Node_iterator<_Value, __constant_iterators,
@@ -517,6 +603,110 @@ namespace __detail
}
};
+ /// Node iterators, used to iterate through all the hashtable.
+ template<typename _NodePtr, bool __constant_iterators>
+ struct _Hashtable_iterator
+ : public _Hashtable_iterator_base<_NodePtr>
+ {
+ private:
+ using __base_type = _Hashtable_iterator_base<_NodePtr>;
+ using __node_type = typename __base_type::__node_type;
+ using __node_ptr = typename __base_type::__node_ptr;
+
+ public:
+ typedef typename __node_type::value_type value_type;
+ typedef typename __node_type::difference_type difference_type;
+ typedef std::forward_iterator_tag iterator_category;
+
+ using pointer = typename std::conditional<__constant_iterators,
+ const value_type*, value_type*>::type;
+
+ using reference = typename std::conditional<__constant_iterators,
+ const value_type&, value_type&>::type;
+
+ _Hashtable_iterator() = default;
+
+ explicit
+ _Hashtable_iterator(__node_ptr __p) noexcept
+ : __base_type(__p) { }
+
+ reference
+ operator*() const noexcept
+ { return this->_M_cur->_M_v(); }
+
+ pointer
+ operator->() const noexcept
+ { return this->_M_cur->_M_valptr(); }
+
+ _Hashtable_iterator&
+ operator++() noexcept
+ {
+ this->_M_incr();
+ return *this;
+ }
+
+ _Hashtable_iterator
+ operator++(int) noexcept
+ {
+ _Hashtable_iterator __tmp(*this);
+ this->_M_incr();
+ return __tmp;
+ }
+ };
+
+ /// Node const_iterators, used to iterate through all the hashtable.
+ template<typename _NodePtr, bool __constant_iterators>
+ struct _Hashtable_const_iterator
+ : public _Hashtable_iterator_base<_NodePtr>
+ {
+ private:
+ using __base_type = _Hashtable_iterator_base<_NodePtr>;
+ using __node_type = typename __base_type::__node_type;
+ using __node_ptr = typename __base_type::__node_ptr;
+
+ public:
+ typedef typename __node_type::value_type value_type;
+ typedef typename __node_type::difference_type difference_type;
+ typedef std::forward_iterator_tag iterator_category;
+
+ typedef const value_type* pointer;
+ typedef const value_type& reference;
+
+ _Hashtable_const_iterator() = default;
+
+ explicit
+ _Hashtable_const_iterator(__node_ptr __p) noexcept
+ : __base_type(__p) { }
+
+ _Hashtable_const_iterator(
+ const _Hashtable_iterator<_NodePtr,
+ __constant_iterators>& __x) noexcept
+ : __base_type(__x._M_cur) { }
+
+ reference
+ operator*() const noexcept
+ { return this->_M_cur->_M_v(); }
+
+ pointer
+ operator->() const noexcept
+ { return this->_M_cur->_M_valptr(); }
+
+ _Hashtable_const_iterator&
+ operator++() noexcept
+ {
+ this->_M_incr();
+ return *this;
+ }
+
+ _Hashtable_const_iterator
+ operator++(int) noexcept
+ {
+ _Hashtable_const_iterator __tmp(*this);
+ this->_M_incr();
+ return __tmp;
+ }
+ };
+
// Many of class template _Hashtable's template parameters are policy
// classes. These are defaults for the policies.
@@ -912,16 +1102,16 @@ namespace __detail
using __hash_cached = typename _Traits::__hash_cached;
using __constant_iterators = typename _Traits::__constant_iterators;
+ using __alloc_ptr = __alloc_val_ptr<_Alloc, _Value>;
- using __hashtable_alloc = _Hashtable_alloc<
- __alloc_rebind<_Alloc, _Hash_node<_Value,
- __hash_cached::value>>>;
+ using __node_type = __get_node_type<_Alloc, _Value, __hash_cached::value>;
+ using __node_ptr = __ptr_rebind<__alloc_ptr, __node_type>;
+ using __node_alloc_type = __alloc_rebind<_Alloc, __node_type>;
using value_type = typename __hashtable_base::value_type;
using size_type = typename __hashtable_base::size_type;
using __unique_keys = typename _Traits::__unique_keys;
- using __node_alloc_type = typename __hashtable_alloc::__node_alloc_type;
using __node_gen_type = _AllocNode<__node_alloc_type>;
__hashtable&
@@ -939,12 +1129,19 @@ namespace __detail
const _NodeGetter&, false_type __uks);
public:
- using iterator = _Node_iterator<_Value, __constant_iterators::value,
- __hash_cached::value>;
-
- using const_iterator = _Node_const_iterator<_Value,
- __constant_iterators::value,
- __hash_cached::value>;
+ using iterator =
+ typename std::conditional<std::is_pointer<__alloc_ptr>::value,
+ _Node_iterator<_Value,
+ __constant_iterators::value, __hash_cached::value>,
+ _Hashtable_iterator<__node_ptr,
+ __constant_iterators::value>>::type;
+
+ using const_iterator =
+ typename std::conditional<std::is_pointer<__alloc_ptr>::value,
+ _Node_const_iterator<_Value,
+ __constant_iterators::value, __hash_cached::value>,
+ _Hashtable_const_iterator<__node_ptr,
+ __constant_iterators::value>>::type;
using __ireturn_type = __conditional_t<__unique_keys::value,
std::pair<iterator, bool>,
@@ -1280,6 +1477,17 @@ namespace __detail
bool __cache_hash_code>
struct _Local_iterator_base;
+ /**
+ * Primary class template _Hashtable_local_iter_base.
+ *
+ * Base class for local iterators, used to iterate within a bucket
+ * but not between buckets.
+ */
+ template<typename _Key, typename _NodePtr, typename _ExtractKey,
+ typename _Hash, typename _RangeHash, typename _Unused,
+ bool __cache_hash_code>
+ struct _Hashtable_local_iter_base;
+
/**
* Primary class template _Hash_code_base.
*
@@ -1440,6 +1648,49 @@ namespace __detail
_M_get_bucket() const { return _M_bucket; } // for debug mode
};
+ /// Partial specialization used when nodes contain a cached hash code.
+ template<typename _Key, typename _NodePtr, typename _ExtractKey,
+ typename _Hash, typename _RangeHash, typename _Unused>
+ struct _Hashtable_local_iter_base<_Key, _NodePtr, _ExtractKey,
+ _Hash, _RangeHash, _Unused, true>
+ : public _Hashtable_iterator_base<_NodePtr>
+ {
+ protected:
+ using __base_node_iter = _Hashtable_iterator_base<_NodePtr>;
+ using __node_ptr = typename __base_node_iter::__node_ptr;
+ using __node_type = typename __base_node_iter::__node_type;
+ using value_type = typename __node_type::value_type;
+ using __hash_code_base = _Hash_code_base<_Key, value_type, _ExtractKey,
+ _Hash, _RangeHash, _Unused, true>;
+
+ _Hashtable_local_iter_base() = default;
+ _Hashtable_local_iter_base(const __hash_code_base&,
+ __node_ptr __p,
+ std::size_t __bkt, std::size_t __bkt_count)
+ : __base_node_iter(__p), _M_bucket(__bkt), _M_bucket_count(__bkt_count)
+ { }
+
+ void
+ _M_incr()
+ {
+ __base_node_iter::_M_incr();
+ if (this->_M_cur)
+ {
+ std::size_t __bkt
+ = _RangeHash{}(this->_M_cur->_M_hash_code, _M_bucket_count);
+ if (__bkt != _M_bucket)
+ this->_M_cur = nullptr;
+ }
+ }
+
+ std::size_t _M_bucket;
+ std::size_t _M_bucket_count;
+
+ public:
+ std::size_t
+ _M_get_bucket() const { return _M_bucket; } // for debug mode
+ };
+
// Uninitialized storage for a _Hash_code_base.
// This type is DefaultConstructible and Assignable even if the
// _Hash_code_base type isn't, so that _Local_iterator_base<..., false>
@@ -1554,6 +1805,86 @@ namespace __detail
_M_get_bucket() const { return _M_bucket; } // for debug mode
};
+ // Partial specialization used when hash codes are not cached
+ template<typename _Key, typename _NodePtr, typename _ExtractKey,
+ typename _Hash, typename _RangeHash, typename _Unused>
+ struct _Hashtable_local_iter_base<_Key, _NodePtr, _ExtractKey,
+ _Hash, _RangeHash, _Unused, false>
+ : _Hash_code_storage<_Hash>
+ , _Hashtable_iterator_base<_NodePtr>
+ {
+ protected:
+ using __node_iter_base = _Hashtable_iterator_base<_NodePtr>;
+ using __node_type = typename __node_iter_base::__node_type;
+ using __node_ptr = typename __node_iter_base::__node_ptr;
+ using value_type = typename __node_type::value_type;
+ using __hash_code_base = _Hash_code_base<_Key, value_type, _ExtractKey,
+ _Hash, _RangeHash, _Unused, false>;
+
+ _Hashtable_local_iter_base() : _M_bucket_count(-1) { }
+
+ _Hashtable_local_iter_base(const __hash_code_base& __base,
+ __node_ptr __p,
+ std::size_t __bkt, std::size_t __bkt_count)
+ : __node_iter_base(__p), _M_bucket(__bkt), _M_bucket_count(__bkt_count)
+ { _M_init(__base.hash_function()); }
+
+ ~_Hashtable_local_iter_base()
+ {
+ if (_M_bucket_count != -1)
+ _M_destroy();
+ }
+
+ _Hashtable_local_iter_base(const _Hashtable_local_iter_base& __iter)
+ : __node_iter_base(__iter._M_cur), _M_bucket(__iter._M_bucket)
+ , _M_bucket_count(__iter._M_bucket_count)
+ {
+ if (_M_bucket_count != -1)
+ _M_init(*__iter._M_h());
+ }
+
+ _Hashtable_local_iter_base&
+ operator=(const _Hashtable_local_iter_base& __iter)
+ {
+ if (_M_bucket_count != -1)
+ _M_destroy();
+ this->_M_cur = __iter._M_cur;
+ _M_bucket = __iter._M_bucket;
+ _M_bucket_count = __iter._M_bucket_count;
+ if (_M_bucket_count != -1)
+ _M_init(*__iter._M_h());
+ return *this;
+ }
+
+ void
+ _M_incr()
+ {
+ __node_iter_base::_M_incr();
+ if (this->_M_cur)
+ {
+ std::size_t __bkt =
+ _RangeHash{}((*this->_M_h())(_ExtractKey{}(this->_M_cur->_M_v())),
+ _M_bucket_count);
+ if (__bkt != _M_bucket)
+ this->_M_cur = nullptr;
+ }
+ }
+
+ std::size_t _M_bucket;
+ std::size_t _M_bucket_count;
+
+ void
+ _M_init(const _Hash& __h)
+ { ::new(this->_M_h()) _Hash(__h); }
+
+ void
+ _M_destroy() { this->_M_h()->~_Hash(); }
+
+ public:
+ std::size_t
+ _M_get_bucket() const { return _M_bucket; } // for debug mode
+ };
+
/// local iterators
template<typename _Key, typename _Value, typename _ExtractKey,
typename _Hash, typename _RangeHash, typename _Unused,
@@ -1667,6 +1998,156 @@ namespace __detail
}
};
+ /// local iterators
+ template<typename _Key, typename _NodePtr, typename _ExtractKey,
+ typename _Hash, typename _RangeHash, typename _Unused,
+ bool __constant_iterators, bool __cache>
+ struct _Hashtable_local_iterator
+ : public _Hashtable_local_iter_base<_Key, _NodePtr, _ExtractKey,
+ _Hash, _RangeHash, _Unused, __cache>
+ {
+ private:
+ using __base_type =
+ _Hashtable_local_iter_base<_Key, _NodePtr, _ExtractKey,
+ _Hash, _RangeHash, _Unused, __cache>;
+ using __hash_code_base = typename __base_type::__hash_code_base;
+ using __node_type = typename __base_type::__node_type;
+ using __node_ptr = typename __base_type::__node_ptr;
+
+ public:
+ typedef typename __base_type::value_type value_type;
+ typedef typename std::conditional<__constant_iterators,
+ const value_type*, value_type*>::type
+ pointer;
+ typedef typename std::conditional<__constant_iterators,
+ const value_type&, value_type&>::type
+ reference;
+ typedef typename __node_type::difference_type difference_type;
+ typedef std::forward_iterator_tag iterator_category;
+
+ _Hashtable_local_iterator() = default;
+
+ _Hashtable_local_iterator(const __hash_code_base& __base,
+ __node_ptr __n,
+ std::size_t __bkt, std::size_t __bkt_count)
+ : __base_type(__base, __n, __bkt, __bkt_count)
+ { }
+
+ reference
+ operator*() const
+ { return this->_M_cur->_M_v(); }
+
+ pointer
+ operator->() const
+ { return this->_M_cur->_M_valptr(); }
+
+ _Hashtable_local_iterator&
+ operator++()
+ {
+ this->_M_incr();
+ return *this;
+ }
+
+ _Hashtable_local_iterator
+ operator++(int)
+ {
+ _Hashtable_local_iterator __tmp(*this);
+ this->_M_incr();
+ return __tmp;
+ }
+ };
+
+ /// local const_iterators
+ template<typename _Key, typename _NodePtr, typename _ExtractKey,
+ typename _Hash, typename _RangeHash, typename _Unused,
+ bool __constant_iterators, bool __cache>
+ struct _Hashtable_const_local_iterator
+ : public _Hashtable_local_iter_base<_Key, _NodePtr, _ExtractKey,
+ _Hash, _RangeHash, _Unused, __cache>
+ {
+ private:
+ using __base_type =
+ _Hashtable_local_iter_base<_Key, _NodePtr, _ExtractKey,
+ _Hash, _RangeHash, _Unused, __cache>;
+ using __hash_code_base = typename __base_type::__hash_code_base;
+ using __node_type = typename __base_type::__node_type;
+ using __node_ptr = typename __base_type::__node_ptr;
+
+ public:
+ typedef typename __base_type::value_type value_type;
+ typedef const value_type* pointer;
+ typedef const value_type& reference;
+ typedef typename __node_type::difference_type difference_type;
+ typedef std::forward_iterator_tag iterator_category;
+
+ _Hashtable_const_local_iterator() = default;
+
+ _Hashtable_const_local_iterator(const __hash_code_base& __base,
+ __node_ptr __n,
+ std::size_t __bkt, std::size_t __bkt_count)
+ : __base_type(__base, __n, __bkt, __bkt_count)
+ { }
+
+ _Hashtable_const_local_iterator(const _Hashtable_local_iterator<
+ _Key, _NodePtr, _ExtractKey,
+ _Hash, _RangeHash, _Unused,
+ __constant_iterators,
+ __cache>& __x)
+ : __base_type(__x)
+ { }
+
+ reference
+ operator*() const
+ { return this->_M_cur->_M_v(); }
+
+ pointer
+ operator->() const
+ { return this->_M_cur->_M_valptr(); }
+
+ _Hashtable_const_local_iterator&
+ operator++()
+ {
+ this->_M_incr();
+ return *this;
+ }
+
+ _Hashtable_const_local_iterator
+ operator++(int)
+ {
+ _Hashtable_const_local_iterator __tmp(*this);
+ this->_M_incr();
+ return __tmp;
+ }
+ };
+
+ template<typename _NodePtr, typename _Key, typename _Value,
+ typename _ExtractKey, typename _Hash,
+ typename _RangeHash, typename _Unused,
+ bool __constant_iterators, bool __hash_cached>
+ using __local_iterator = typename std::conditional<
+ std::is_pointer<_NodePtr>::value,
+ _Local_iterator<_Key, _Value,
+ _ExtractKey, _Hash, _RangeHash, _Unused,
+ __constant_iterators, __hash_cached>,
+ _Hashtable_local_iterator<_Key, _NodePtr,
+ _ExtractKey, _Hash, _RangeHash, _Unused,
+ __constant_iterators,
+ __hash_cached>>::type;
+
+ template<typename _NodePtr, typename _Key, typename _Value,
+ typename _ExtractKey, typename _Hash,
+ typename _RangeHash, typename _Unused,
+ bool __constant_iterators, bool __hash_cached>
+ using __const_local_iterator = typename std::conditional<
+ std::is_pointer<_NodePtr>::value,
+ _Local_const_iterator<_Key, _Value,
+ _ExtractKey, _Hash, _RangeHash, _Unused,
+ __constant_iterators, __hash_cached>,
+ _Hashtable_const_local_iterator<_Key, _NodePtr,
+ _ExtractKey, _Hash, _RangeHash, _Unused,
+ __constant_iterators,
+ __hash_cached>>::type;
+
/**
* Primary class template _Hashtable_base.
*
@@ -1943,10 +2424,16 @@ namespace __detail
using __ebo_node_alloc = _Hashtable_ebo_helper<0, _NodeAlloc>;
template<typename>
- struct __get_value_type;
+ struct __get_node_base;
template<typename _Val, bool _Cache_hash_code>
- struct __get_value_type<_Hash_node<_Val, _Cache_hash_code>>
- { using type = _Val; };
+ struct __get_node_base<_Hash_node<_Val, _Cache_hash_code>>
+ { using type = _Hash_node_base; };
+ template<typename _ValuePtr, bool _Cache_hash_code>
+ struct __get_node_base<_Hash_pnode<_ValuePtr, _Cache_hash_code>>
+ {
+ using type = _Hash_pnode_base<__ptr_rebind<
+ _ValuePtr, _Hash_pnode<_ValuePtr, _Cache_hash_code>>>;
+ };
public:
using __node_type = typename _NodeAlloc::value_type;
@@ -1954,16 +2441,13 @@ namespace __detail
// Use __gnu_cxx to benefit from _S_always_equal and al.
using __node_alloc_traits = __gnu_cxx::__alloc_traits<__node_alloc_type>;
- using __value_alloc_traits = typename __node_alloc_traits::template
- rebind_traits<typename __get_value_type<__node_type>::type>;
-
- using __node_ptr = __node_type*;
- using __node_base = _Hash_node_base;
- using __node_base_ptr = __node_base*;
+ using __node_ptr = typename __node_alloc_traits::pointer;
+ using __node_base = typename __get_node_base<__node_type>::type;
+ using __node_base_ptr = __ptr_rebind<__node_ptr, __node_base>;
using __buckets_alloc_type =
__alloc_rebind<__node_alloc_type, __node_base_ptr>;
using __buckets_alloc_traits =
std::allocator_traits<__buckets_alloc_type>;
- using __buckets_ptr = __node_base_ptr*;
+ using __buckets_ptr = typename __buckets_alloc_traits::pointer;
_Hashtable_alloc() = default;
_Hashtable_alloc(const _Hashtable_alloc&) = default;
@@ -2017,17 +2501,16 @@ namespace __detail
{
auto& __alloc = _M_node_allocator();
auto __nptr = __node_alloc_traits::allocate(__alloc, 1);
- __node_ptr __n = std::__to_address(__nptr);
__try
{
- ::new ((void*)__n) __node_type;
- __node_alloc_traits::construct(__alloc, __n->_M_valptr(),
+ __node_alloc_traits::construct(__alloc, std::__to_address(__nptr));
+ __node_alloc_traits::construct(__alloc, __nptr->_M_valptr(),
std::forward<_Args>(__args)...);
- return __n;
+ return __nptr;
}
__catch(...)
{
- __n->~__node_type();
+ __node_alloc_traits::destroy(__alloc, std::__to_address(__nptr));
__node_alloc_traits::deallocate(__alloc, __nptr, 1);
__throw_exception_again;
}
@@ -2045,10 +2528,9 @@ namespace __detail
void
_Hashtable_alloc<_NodeAlloc>::_M_deallocate_node_ptr(__node_ptr __n)
{
- typedef typename __node_alloc_traits::pointer _Ptr;
- auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__n);
- __n->~__node_type();
- __node_alloc_traits::deallocate(_M_node_allocator(), __ptr, 1);
+ auto& __alloc = _M_node_allocator();
+ __node_alloc_traits::destroy(__alloc, std::__to_address(__n));
+ __node_alloc_traits::deallocate(__alloc, __n, 1);
}
template<typename _NodeAlloc>
@@ -2069,11 +2551,9 @@ namespace __detail
-> __buckets_ptr
{
__buckets_alloc_type __alloc(_M_node_allocator());
-
auto __ptr = __buckets_alloc_traits::allocate(__alloc, __bkt_count);
- __buckets_ptr __p = std::__to_address(__ptr);
- __builtin_memset(__p, 0, __bkt_count * sizeof(__node_base_ptr));
- return __p;
+ std::__uninitialized_default_n(__ptr, __bkt_count);
+ return __ptr;
}
template<typename _NodeAlloc>
@@ -2082,10 +2562,9 @@ namespace __detail
_M_deallocate_buckets(__buckets_ptr __bkts,
std::size_t __bkt_count)
{
- typedef typename __buckets_alloc_traits::pointer _Ptr;
- auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__bkts);
__buckets_alloc_type __alloc(_M_node_allocator());
- __buckets_alloc_traits::deallocate(__alloc, __ptr, __bkt_count);
+ std::_Destroy_n(__bkts, __bkt_count);
+ __buckets_alloc_traits::deallocate(__alloc, __bkts, __bkt_count);
}
///@} hashtable-detail
diff --git a/libstdc++-v3/testsuite/23_containers/unordered_map/allocator/ext_ptr.cc b/libstdc++-v3/testsuite/23_containers/unordered_map/allocator/ext_ptr.cc
new file mode 100644
index 00000000000..5a7096a51fc
--- /dev/null
+++ b/libstdc++-v3/testsuite/23_containers/unordered_map/allocator/ext_ptr.cc
@@ -0,0 +1,40 @@
+// { dg-do run { target c++11 } }
+
+#include <unordered_map>
+#include <memory>
+#include <testsuite_hooks.h>
+#include <testsuite_allocator.h>
+
+struct T { int i; };
+
+bool operator==(const T& l, const T& r)
+{ return l.i == r.i; }
+
+struct H
+{
+ std::size_t
+ operator()(const T& t) const noexcept
+ { return t.i; }
+};
+
+struct E : std::equal_to<T>
+{ };
+
+using __gnu_test::CustomPointerAlloc;
+
+template class std::unordered_map<T, int, H, E,
+ CustomPointerAlloc<std::pair<const T, int>>>;
+
+void test01()
+{
+ typedef CustomPointerAlloc<std::pair<const T, int>> alloc_type;
+ typedef std::unordered_map<T, int, H, E, alloc_type> test_type;
+ test_type v;
+ v.insert({ T(), 0 });
+ VERIFY( ++v.begin() == v.end() );
+}
+
+int main()
+{
+ test01();
+}
diff --git a/libstdc++-v3/testsuite/23_containers/unordered_multimap/allocator/ext_ptr.cc b/libstdc++-v3/testsuite/23_containers/unordered_multimap/allocator/ext_ptr.cc
new file mode 100644
index 00000000000..5034175b8a9
--- /dev/null
+++ b/libstdc++-v3/testsuite/23_containers/unordered_multimap/allocator/ext_ptr.cc
@@ -0,0 +1,40 @@
+// { dg-do run { target c++11 } }
+
+#include <unordered_map>
+#include <memory>
+#include <testsuite_hooks.h>
+#include <testsuite_allocator.h>
+
+struct T { int i; };
+
+bool operator==(const T& l, const T& r)
+{ return l.i == r.i; }
+
+struct H
+{
+ std::size_t
+ operator()(const T& t) const noexcept
+ { return t.i; }
+};
+
+struct E : std::equal_to<T>
+{ };
+
+using __gnu_test::CustomPointerAlloc;
+
+template class std::unordered_multimap<T, int, H, E,
+ CustomPointerAlloc<std::pair<const T, int>>>;
+
+void test01()
+{
+ typedef CustomPointerAlloc<std::pair<const T, int>> alloc_type;
+ typedef std::unordered_multimap<T, int, H, E, alloc_type> test_type;
+ test_type v;
+ v.insert({ T(), 0 });
+ VERIFY( ++v.begin() == v.end() );
+}
+
+int main()
+{
+ test01();
+}
diff --git a/libstdc++-v3/testsuite/23_containers/unordered_multiset/allocator/ext_ptr.cc b/libstdc++-v3/testsuite/23_containers/unordered_multiset/allocator/ext_ptr.cc
new file mode 100644
index 00000000000..8e8ceb1ed10
--- /dev/null
+++ b/libstdc++-v3/testsuite/23_containers/unordered_multiset/allocator/ext_ptr.cc
@@ -0,0 +1,39 @@
+// { dg-do run { target c++11 } }
+
+#include <unordered_set>
+#include <memory>
+#include <testsuite_hooks.h>
+#include <testsuite_allocator.h>
+
+struct T { int i; };
+
+bool operator==(const T& l, const T& r)
+{ return l.i == r.i; }
+
+struct H
+{
+ std::size_t
+ operator()(const T& t) const noexcept
+ { return t.i; }
+};
+
+struct E : std::equal_to<T>
+{ };
+
+using __gnu_test::CustomPointerAlloc;
+
+template class std::unordered_multiset<T, H, E, CustomPointerAlloc<T>>;
+
+void test01()
+{
+ typedef CustomPointerAlloc<T> alloc_type;
+ typedef std::unordered_multiset<T, H, E, alloc_type> test_type;
+ test_type v;
+ v.insert(T());
+ VERIFY( ++v.begin() == v.end() );
+}
+
+int main()
+{
+ test01();
+}
diff --git a/libstdc++-v3/testsuite/23_containers/unordered_set/allocator/ext_ptr.cc b/libstdc++-v3/testsuite/23_containers/unordered_set/allocator/ext_ptr.cc
index afcc58e39f4..2c1bbe9ad3e 100644
--- a/libstdc++-v3/testsuite/23_containers/unordered_set/allocator/ext_ptr.cc
+++ b/libstdc++-v3/testsuite/23_containers/unordered_set/allocator/ext_ptr.cc
@@ -1,24 +1,4 @@
-// Copyright (C) 2013-2024 Free Software Foundation, Inc.
-//
-// This file is part of the GNU ISO C++ Library. This library is free
-// software; you can redistribute it and/or modify it under the
-// terms of the GNU General Public License as published by the
-// Free Software Foundation; either version 3, or (at your option)
-// any later version.
-
-// This library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU General Public License for more details.
-
-// You should have received a copy of the GNU General Public License along
-// with this library; see the file COPYING3. If not see
-// <http://www.gnu.org/licenses/>.
-
-// This test fails to compile since C++17 (see xfail-if below) so we can only
-// do a "run" test for C++11 and C++14, and a "compile" test for C++17 and up.
-// { dg-do run { target { c++11_only || c++14_only } } }
-// { dg-do compile { target c++17 } }
+// { dg-do run { target { c++11 } } }
#include <unordered_set>
#include <memory>
@@ -26,15 +6,22 @@
#include <testsuite_allocator.h>
struct T { int i; };
-bool operator==(const T& l, const T& r) { return l.i == r.i; }
-struct H { std::size_t operator()(const T& t) const noexcept { return t.i; }
+
+bool operator==(const T& l, const T& r)
+{ return l.i == r.i; }
+
+struct H
+{
+ std::size_t
+ operator()(const T& t) const noexcept
+ { return t.i; }
};
-struct E : std::equal_to<T> { };
+
+struct E : std::equal_to<T>
+{ };
using __gnu_test::CustomPointerAlloc;
-// { dg-xfail-if "node reinsertion assumes raw pointers" { c++17 } }
-// TODO when removing this xfail change the test back to "dg-do run".
template class std::unordered_set<T, H, E, CustomPointerAlloc<T>>;
void test01()