Update foa-based containers to store only fancy pointers

Christian Mazakas
2023-08-15 13:03:32 -07:00
parent 4d6ebc7eb3
commit 95a37b0d3d
8 changed files with 160 additions and 158 deletions
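
What the diff does, in one line: the foa internals stop storing raw group_access*/group_type*/value_type* members and instead store the pointer type obtained from the allocator, so allocators with fancy pointers keep working. The recurring idiom is: keep the fancy pointer as the data member, drop to a raw pointer with boost::to_address where arithmetic or placement new is needed, and go back from a raw address with boost::pointer_traits<P>::pointer_to. Below is a minimal sketch of that idiom, assuming Boost.Core; tagged_ptr is a made-up fancy pointer for illustration, only boost::pointer_traits and boost::to_address are real facilities.

#include <boost/core/pointer_traits.hpp> // boost::pointer_traits, boost::to_address
#include <cassert>
#include <memory>
#include <type_traits>

// hypothetical fancy pointer: the minimal interface pointer_traits needs
template<class T>
struct tagged_ptr
{
  using element_type = T;

  T* raw;

  T& operator*() const noexcept { return *raw; }
  T* operator->() const noexcept { return raw; }

  // used by boost::pointer_traits<tagged_ptr<T>>::pointer_to
  static tagged_ptr pointer_to(T& r) noexcept { return {std::addressof(r)}; }
};

int main()
{
  int x = 42;

  // raw address -> fancy pointer (what dummy_group_accesses/new_ now do)
  tagged_ptr<int> fp = boost::pointer_traits<tagged_ptr<int>>::pointer_to(x);

  // fancy pointer -> raw pointer (what the new groups()/elements() accessors do)
  int* rp = boost::to_address(fp);
  assert(rp == &x);

  // rebinding stays inside the same pointer family, as group_type_pointer does below
  using long_ptr = boost::pointer_traits<tagged_ptr<int>>::rebind<long>;
  static_assert(std::is_same<long_ptr, tagged_ptr<long>>::value, "rebind keeps the fancy type");
}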

View File

@ -231,8 +231,8 @@ private:
insert_counter_type cnt{0};
};
template<std::size_t Size>
group_access* dummy_group_accesses()
template<typename GroupAccessPtr,std::size_t Size>
GroupAccessPtr dummy_group_accesses()
{
/* Default group_access array to provide to empty containers without
* incurring dynamic allocation. Mutexes won't actually ever be used,
@ -240,23 +240,34 @@ group_access* dummy_group_accesses()
* be incremented (insertions won't succeed as capacity()==0).
*/
static group_access accesses[Size];
using pointer_traits=boost::pointer_traits<GroupAccessPtr>;
using group_access_type=typename pointer_traits::element_type;
return accesses;
static group_access_type accesses[Size];
return pointer_traits::pointer_to(*accesses);
}
/* subclasses table_arrays to add an additional group_access array */
template<typename Value,typename Group,typename SizePolicy>
struct concurrent_table_arrays:table_arrays<Value,Group,SizePolicy>
template<typename Value,typename Group,typename SizePolicy,typename Allocator>
struct concurrent_table_arrays:table_arrays<Value,Group,SizePolicy,Allocator>
{
using super=table_arrays<Value,Group,SizePolicy>;
using group_access_allocator_type=
typename boost::allocator_rebind<Allocator,group_access>::type;
using group_access_pointer=
typename boost::allocator_pointer<group_access_allocator_type>::type;
concurrent_table_arrays(const super& arrays,group_access *pga):
super{arrays},group_accesses{pga}{}
using super=table_arrays<Value,Group,SizePolicy,Allocator>;
template<typename Allocator>
static concurrent_table_arrays new_(Allocator& al,std::size_t n)
concurrent_table_arrays(const super& arrays,group_access_pointer pga):
super{arrays},group_accesses_{pga}{}
group_access* group_accesses()const noexcept{
return boost::to_address(group_accesses_);
}
static concurrent_table_arrays new_(group_access_allocator_type al,std::size_t n)
{
super x{super::new_(al,n)};
BOOST_TRY{
@ -269,54 +280,39 @@ struct concurrent_table_arrays:table_arrays<Value,Group,SizePolicy>
BOOST_CATCH_END
}
template<typename Allocator>
static concurrent_table_arrays new_group_access(Allocator& al,const super& x)
static concurrent_table_arrays new_group_access(group_access_allocator_type al,const super& x)
{
concurrent_table_arrays arrays{x,nullptr};
if(!arrays.elements){
arrays.group_accesses=dummy_group_accesses<SizePolicy::min_size()>();
if(!arrays.elements_&&std::is_same<group_access*,group_access_pointer>::value){
arrays.group_accesses_=
dummy_group_accesses<group_access_pointer,SizePolicy::min_size()>();
}
else{
using access_alloc=
typename boost::allocator_rebind<Allocator,group_access>::type;
using access_traits=boost::allocator_traits<access_alloc>;
auto aal=access_alloc(al);
arrays.group_accesses=boost::to_address(
access_traits::allocate(aal,arrays.groups_size_mask+1));
arrays.group_accesses_=
boost::allocator_allocate(al,arrays.groups_size_mask+1);
for(std::size_t i=0;i<arrays.groups_size_mask+1;++i){
::new (arrays.group_accesses+i) group_access();
::new (arrays.group_accesses()+i) group_access();
}
}
return arrays;
}
template<typename Allocator>
static void delete_(Allocator& al,concurrent_table_arrays& arrays)noexcept
static void delete_(group_access_allocator_type al,concurrent_table_arrays& arrays)noexcept
{
delete_group_access(al,arrays);
super::delete_(al,arrays);
}
template<typename Allocator>
static void delete_group_access(Allocator& al,concurrent_table_arrays& arrays)noexcept
static void delete_group_access(group_access_allocator_type al,concurrent_table_arrays& arrays)noexcept
{
if(arrays.elements){
using access_alloc=
typename boost::allocator_rebind<Allocator,group_access>::type;
using access_traits=boost::allocator_traits<access_alloc>;
using pointer=typename access_traits::pointer;
using pointer_traits=boost::pointer_traits<pointer>;
auto aal=access_alloc(al);
access_traits::deallocate(
aal,pointer_traits::pointer_to(*arrays.group_accesses),
arrays.groups_size_mask+1);
if(arrays.elements_){
boost::allocator_deallocate(
al,arrays.group_accesses_,arrays.groups_size_mask+1);
}
}
group_access *group_accesses;
group_access_pointer group_accesses_;
};
struct atomic_size_control
@ -480,8 +476,9 @@ public:
x.al(),
typename arrays_type::super{
x.arrays.groups_size_index,x.arrays.groups_size_mask,
reinterpret_cast<group_type*>(x.arrays.groups),
reinterpret_cast<value_type*>(x.arrays.elements)})),
boost::pointer_traits<typename arrays_type::group_type_pointer>::pointer_to(
*reinterpret_cast<group_type*>(boost::to_address(x.arrays.groups_))),
x.arrays.elements_})),
size_ctrl_type{x.size_ctrl.ml,x.size_ctrl.size}}
{
x.empty_initialize();
@ -967,18 +964,18 @@ private:
inline group_shared_lock_guard access(group_shared,std::size_t pos)const
{
return this->arrays.group_accesses[pos].shared_access();
return this->arrays.group_accesses()[pos].shared_access();
}
inline group_exclusive_lock_guard access(
group_exclusive,std::size_t pos)const
{
return this->arrays.group_accesses[pos].exclusive_access();
return this->arrays.group_accesses()[pos].exclusive_access();
}
inline group_insert_counter_type& insert_counter(std::size_t pos)const
{
return this->arrays.group_accesses[pos].insert_counter();
return this->arrays.group_accesses()[pos].insert_counter();
}
/* Const casts value_type& according to the level of group access for
@ -1097,10 +1094,10 @@ private:
prober pb(pos0);
do{
auto pos=pb.get();
auto pg=this->arrays.groups+pos;
auto pg=this->arrays.groups()+pos;
auto mask=pg->match(hash);
if(mask){
auto p=this->arrays.elements+pos*N;
auto p=this->arrays.elements()+pos*N;
BOOST_UNORDERED_PREFETCH_ELEMENTS(p,N);
auto lck=access(access_mode,pos);
do{
@ -1313,7 +1310,7 @@ private:
if(BOOST_LIKELY(rsize.succeeded())){
for(prober pb(pos0);;pb.next(this->arrays.groups_size_mask)){
auto pos=pb.get();
auto pg=this->arrays.groups+pos;
auto pg=this->arrays.groups()+pos;
auto lck=access(group_exclusive{},pos);
auto mask=pg->match_available();
if(BOOST_LIKELY(mask!=0)){
@ -1323,7 +1320,7 @@ private:
/* other thread inserted from pos0, need to start over */
goto startover;
}
auto p=this->arrays.elements+pos*N+n;
auto p=this->arrays.elements()+pos*N+n;
this->construct_element(p,std::forward<Args>(args)...);
rslot.commit();
rsize.commit();
@ -1373,11 +1370,11 @@ private:
auto for_all_elements_while(GroupAccessMode access_mode,F f)const
->decltype(f(nullptr,0,nullptr),bool())
{
auto p=this->arrays.elements;
auto p=this->arrays.elements();
if(p){
for(auto pg=this->arrays.groups,last=pg+this->arrays.groups_size_mask+1;
for(auto pg=this->arrays.groups(),last=pg+this->arrays.groups_size_mask+1;
pg!=last;++pg,p+=N){
auto lck=access(access_mode,(std::size_t)(pg-this->arrays.groups));
auto lck=access(access_mode,(std::size_t)(pg-this->arrays.groups()));
auto mask=this->match_really_occupied(pg,last);
while(mask){
auto n=unchecked_countr_zero(mask);
@ -1405,13 +1402,13 @@ private:
GroupAccessMode access_mode,ExecutionPolicy&& policy,F f)const
->decltype(f(nullptr,0,nullptr),void())
{
if(!this->arrays.elements)return;
auto first=this->arrays.groups,
if(!this->arrays.elements_)return;
auto first=this->arrays.groups(),
last=first+this->arrays.groups_size_mask+1;
std::for_each(std::forward<ExecutionPolicy>(policy),first,last,
[&,this](group_type& g){
auto pos=static_cast<std::size_t>(&g-first);
auto p=this->arrays.elements+pos*N;
auto p=this->arrays.elements()+pos*N;
auto lck=access(access_mode,pos);
auto mask=this->match_really_occupied(&g,last);
while(mask){
@ -1427,13 +1424,13 @@ private:
bool for_all_elements_while(
GroupAccessMode access_mode,ExecutionPolicy&& policy,F f)const
{
if(!this->arrays.elements)return true;
auto first=this->arrays.groups,
if(!this->arrays.elements_)return true;
auto first=this->arrays.groups(),
last=first+this->arrays.groups_size_mask+1;
return std::all_of(std::forward<ExecutionPolicy>(policy),first,last,
[&,this](group_type& g){
auto pos=static_cast<std::size_t>(&g-first);
auto p=this->arrays.elements+pos*N;
auto p=this->arrays.elements()+pos*N;
auto lck=access(access_mode,pos);
auto mask=this->match_really_occupied(&g,last);
while(mask){

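The first file's changes hinge on two details visible above: dummy_group_accesses() now converts the address of its static array into the (possibly fancy) pointer type with pointer_traits::pointer_to, and the empty-container shortcut is additionally guarded by std::is_same<group_access*,group_access_pointer>. A minimal sketch of that shape, assuming Boost.Core; dummy_storage and Ptr are illustrative names, not the library's API:

#include <boost/core/pointer_traits.hpp>
#include <cstddef>

template<class Ptr, std::size_t N>
Ptr dummy_storage()
{
  using traits       = boost::pointer_traits<Ptr>;
  using element_type = typename traits::element_type;

  static element_type storage[N];       // one shared array for every empty container
  return traits::pointer_to(*storage);  // raw address -> (possibly fancy) Ptr
}

// call sites only take this path when Ptr is a plain raw pointer, e.g.
//   if(n==0 && std::is_same<group_access*, Ptr>::value)
//     pga = dummy_storage<Ptr, SizePolicy::min_size()>();
// a fancy pointer (say, an offset pointer into a shared-memory segment) may not be able
// to represent the address of process-local static storage, so when Ptr is not raw the
// code falls through to a real allocation instead.
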
View File

@ -920,8 +920,8 @@ inline unsigned int unchecked_countr_zero(int x)
* allocators.
*/
template<typename Group,std::size_t Size>
Group* dummy_groups()
template<typename GroupPtr,std::size_t Size>
GroupPtr dummy_groups()
{
/* Dummy storage initialized as if in an empty container (actually, each
* of its groups is initialized like a separate empty container).
@ -931,54 +931,65 @@ Group* dummy_groups()
* insertion as the container's capacity is precisely zero.
*/
static constexpr typename Group::dummy_group_type
storage[Size]={typename Group::dummy_group_type(),};
using pointer_traits=boost::pointer_traits<GroupPtr>;
using group_type=typename pointer_traits::element_type;
return reinterpret_cast<Group*>(
const_cast<typename Group::dummy_group_type*>(storage));
static constexpr typename group_type::dummy_group_type
storage[Size]={typename group_type::dummy_group_type(),};
group_type* pg=reinterpret_cast<group_type*>(
const_cast<typename group_type::dummy_group_type*>(storage));
return pointer_traits::pointer_to(*pg);
}
template<typename Value,typename Group,typename SizePolicy>
template<typename Value,typename Group,typename SizePolicy,typename Allocator>
struct table_arrays
{
using allocator_type=typename boost::allocator_rebind<Allocator,Value>::type;
using value_type=Value;
using group_type=Group;
static constexpr auto N=group_type::N;
using size_policy=SizePolicy;
using value_type_pointer=
typename boost::allocator_pointer<allocator_type>::type;
using group_type_pointer=
typename boost::pointer_traits<value_type_pointer>::template
rebind<group_type>;
using group_type_pointer_traits=boost::pointer_traits<group_type_pointer>;
table_arrays(std::size_t gsi,std::size_t gsm,group_type *pg,value_type *pe):
groups_size_index{gsi},groups_size_mask{gsm},groups{pg},elements{pe}{}
table_arrays(std::size_t gsi,std::size_t gsm,group_type_pointer pg,value_type_pointer pe):
groups_size_index{gsi},groups_size_mask{gsm},groups_{pg},elements_{pe}{}
template<typename Allocator>
static table_arrays new_(Allocator& al,std::size_t n)
value_type* elements()const noexcept{return boost::to_address(elements_);}
group_type* groups()const noexcept{return boost::to_address(groups_);}
static table_arrays new_(allocator_type al,std::size_t n)
{
using storage_allocator=
typename boost::allocator_rebind<Allocator, Value>::type;
using storage_traits=boost::allocator_traits<storage_allocator>;
using storage_traits=boost::allocator_traits<allocator_type>;
auto groups_size_index=size_index_for<group_type,size_policy>(n);
auto groups_size=size_policy::size(groups_size_index);
table_arrays arrays{groups_size_index,groups_size-1,nullptr,nullptr};
if(!n){
arrays.groups=dummy_groups<group_type,size_policy::min_size()>();
if(!n&&std::is_same<group_type*,group_type_pointer>::value){
arrays.groups_=dummy_groups<group_type_pointer,size_policy::min_size()>();
}
else{
auto sal=storage_allocator(al);
arrays.elements=boost::to_address(
storage_traits::allocate(sal,buffer_size(groups_size)));
auto sal=allocator_type(al);
arrays.elements_=storage_traits::allocate(sal,buffer_size(groups_size));
/* Align arrays.groups to sizeof(group_type). table_iterator critically
* depends on such alignment for its increment operation.
*/
auto p=reinterpret_cast<unsigned char*>(arrays.elements+groups_size*N-1);
auto p=reinterpret_cast<unsigned char*>(arrays.elements()+groups_size*N-1);
p+=(uintptr_t(sizeof(group_type))-
reinterpret_cast<uintptr_t>(p))%sizeof(group_type);
arrays.groups=reinterpret_cast<group_type*>(p);
arrays.groups_=group_type_pointer_traits::pointer_to(*reinterpret_cast<group_type*>(p));
initialize_groups(
arrays.groups,groups_size,
arrays.groups(),groups_size,
std::integral_constant<
bool,
#if BOOST_WORKAROUND(BOOST_LIBSTDCXX_VERSION,<50000)
@ -988,24 +999,20 @@ struct table_arrays
std::is_trivially_constructible<group_type>::value
#endif
>{});
arrays.groups[groups_size-1].set_sentinel();
using difference_type=typename boost::pointer_traits<group_type_pointer>::difference_type;
arrays.groups_[static_cast<difference_type>(groups_size-1)].set_sentinel();
}
return arrays;
}
template<typename Allocator>
static void delete_(Allocator& al,table_arrays& arrays)noexcept
static void delete_(allocator_type al,table_arrays& arrays)noexcept
{
using storage_alloc=typename boost::allocator_rebind<Allocator,Value>::type;
using storage_traits=boost::allocator_traits<storage_alloc>;
using pointer=typename storage_traits::pointer;
using pointer_traits=boost::pointer_traits<pointer>;
using storage_traits=boost::allocator_traits<allocator_type>;
auto sal=storage_alloc(al);
if(arrays.elements){
auto sal=allocator_type(al);
if(arrays.elements_){
storage_traits::deallocate(
sal,pointer_traits::pointer_to(*arrays.elements),
buffer_size(arrays.groups_size_mask+1));
sal,arrays.elements_,buffer_size(arrays.groups_size_mask+1));
}
}
@ -1024,7 +1031,7 @@ struct table_arrays
}
static void initialize_groups(
group_type* groups_,std::size_t size,std::true_type /* memset */)
group_type* pg,std::size_t size,std::true_type /* memset */)
{
/* memset faster/not slower than manual, assumes all zeros is group_type's
* default layout.
@ -1033,19 +1040,19 @@ struct table_arrays
*/
std::memset(
reinterpret_cast<unsigned char*>(groups_),0,sizeof(group_type)*size);
reinterpret_cast<unsigned char*>(pg),0,sizeof(group_type)*size);
}
static void initialize_groups(
group_type* groups_,std::size_t size,std::false_type /* manual */)
group_type* pg,std::size_t size,std::false_type /* manual */)
{
while(size--!=0)::new (groups_++) group_type();
while(size--!=0)::new (pg++) group_type();
}
std::size_t groups_size_index;
std::size_t groups_size_mask;
group_type *groups;
value_type *elements;
std::size_t groups_size_index;
std::size_t groups_size_mask;
group_type_pointer groups_;
value_type_pointer elements_;
};
struct if_constexpr_void_else{void operator()()const{}};
@ -1258,7 +1265,7 @@ public:
>::type;
using alloc_traits=boost::allocator_traits<Allocator>;
using element_type=typename type_policy::element_type;
using arrays_type=Arrays<element_type,group_type,size_policy>;
using arrays_type=Arrays<element_type,group_type,size_policy,Allocator>;
using size_ctrl_type=SizeControl;
using key_type=typename type_policy::key_type;
@ -1489,11 +1496,12 @@ public:
prober pb(pos0);
do{
auto pos=pb.get();
auto pg=arrays.groups+pos;
auto pg=arrays.groups()+pos;
auto mask=pg->match(hash);
if(mask){
BOOST_UNORDERED_ASSUME(arrays.elements!=nullptr);
auto p=arrays.elements+pos*N;
auto elements=arrays.elements();
BOOST_UNORDERED_ASSUME(elements!=nullptr);
auto p=elements+pos*N;
BOOST_UNORDERED_PREFETCH_ELEMENTS(p,N);
do{
auto n=unchecked_countr_zero(mask);
@ -1542,9 +1550,9 @@ public:
void clear()noexcept
{
auto p=arrays.elements;
auto p=arrays.elements();
if(p){
for(auto pg=arrays.groups,last=pg+arrays.groups_size_mask+1;
for(auto pg=arrays.groups(),last=pg+arrays.groups_size_mask+1;
pg!=last;++pg,p+=N){
auto mask=match_really_occupied(pg,last);
while(mask){
@ -1554,7 +1562,7 @@ public:
/* we wipe the entire metadata to reset the overflow byte as well */
pg->initialize();
}
arrays.groups[arrays.groups_size_mask].set_sentinel();
arrays.groups()[arrays.groups_size_mask].set_sentinel();
size_ctrl.ml=initial_max_load();
size_ctrl.size=0;
}
@ -1565,7 +1573,7 @@ public:
std::size_t capacity()const noexcept
{
return arrays.elements?(arrays.groups_size_mask+1)*N-1:0;
return arrays.elements_?(arrays.groups_size_mask+1)*N-1:0;
}
float load_factor()const noexcept
@ -1784,9 +1792,9 @@ public:
static auto for_all_elements_while(const arrays_type& arrays_,F f)
->decltype(f(nullptr,0,nullptr),bool())
{
auto p=arrays_.elements;
auto p=arrays_.elements();
if(p){
for(auto pg=arrays_.groups,last=pg+arrays_.groups_size_mask+1;
for(auto pg=arrays_.groups(),last=pg+arrays_.groups_size_mask+1;
pg!=last;++pg,p+=N){
auto mask=match_really_occupied(pg,last);
while(mask){
@ -1887,7 +1895,7 @@ private:
void fast_copy_elements_from(const table_core& x)
{
if(arrays.elements&&x.arrays.elements){
if(arrays.elements_&&x.arrays.elements_){
copy_elements_array_from(x);
copy_groups_array_from(x);
size_ctrl.ml=std::size_t(x.size_ctrl.ml);
@ -1921,8 +1929,8 @@ private:
* copy-assignable when we're relying on trivial copy constructibility.
*/
std::memcpy(
reinterpret_cast<unsigned char*>(arrays.elements),
reinterpret_cast<unsigned char*>(x.arrays.elements),
reinterpret_cast<unsigned char*>(arrays.elements()),
reinterpret_cast<unsigned char*>(x.arrays.elements()),
x.capacity()*sizeof(value_type));
}
@ -1932,14 +1940,14 @@ private:
std::size_t num_constructed=0;
BOOST_TRY{
x.for_all_elements([&,this](const element_type* p){
construct_element(arrays.elements+(p-x.arrays.elements),*p);
construct_element(arrays.elements()+(p-x.arrays.elements()),*p);
++num_constructed;
});
}
BOOST_CATCH(...){
if(num_constructed){
x.for_all_elements_while([&,this](const element_type* p){
destroy_element(arrays.elements+(p-x.arrays.elements));
destroy_element(arrays.elements()+(p-x.arrays.elements()));
return --num_constructed!=0;
});
}
@ -1964,15 +1972,17 @@ private:
const table_core& x, std::true_type /* -> memcpy */)
{
std::memcpy(
arrays.groups,x.arrays.groups,
arrays.groups(),x.arrays.groups(),
(arrays.groups_size_mask+1)*sizeof(group_type));
}
void copy_groups_array_from(
const table_core& x, std::false_type /* -> manual */)
{
auto pg=arrays.groups();
auto xpg=x.arrays.groups();
for(std::size_t i=0;i<arrays.groups_size_mask+1;++i){
arrays.groups[i]=x.arrays.groups[i];
pg[i]=xpg[i];
}
}
@ -2102,11 +2112,11 @@ private:
{
for(prober pb(pos0);;pb.next(arrays_.groups_size_mask)){
auto pos=pb.get();
auto pg=arrays_.groups+pos;
auto pg=arrays_.groups()+pos;
auto mask=pg->match_available();
if(BOOST_LIKELY(mask!=0)){
auto n=unchecked_countr_zero(mask);
auto p=arrays_.elements+pos*N+n;
auto p=arrays_.elements()+pos*N+n;
construct_element(p,std::forward<Args>(args)...);
pg->set(n,hash);
return {pg,n,p};

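core.hpp's table_arrays shows the storage pattern the rest of the diff builds on: the members become allocator-derived pointer types (value_type_pointer, and group_type_pointer obtained by rebinding it), while hot paths keep working on raw pointers returned by the new groups()/elements() accessors, so inner-loop arithmetic such as p + pos*N is unchanged. A minimal layout sketch, assuming Boost.Core; arrays_sketch and its aliases are illustrative, not the library's API:

#include <boost/core/allocator_access.hpp>
#include <boost/core/pointer_traits.hpp>

template<class Value, class Group, class Allocator>
struct arrays_sketch
{
  using allocator_type     = typename boost::allocator_rebind<Allocator, Value>::type;
  using value_type_pointer = typename boost::allocator_pointer<allocator_type>::type;
  using group_type_pointer =
    typename boost::pointer_traits<value_type_pointer>::template rebind<Group>;

  // fancy pointers are what gets stored...
  group_type_pointer groups_;
  value_type_pointer elements_;

  // ...while callers get raw pointers on demand
  Group* groups()   const noexcept { return boost::to_address(groups_); }
  Value* elements() const noexcept { return boost::to_address(elements_); }
};

Converting once per accessor call rather than caching raw pointers is what allows the same object layout to sit inside, say, a shared-memory segment where raw addresses are not stable.
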
View File

@ -9,26 +9,30 @@
#ifndef BOOST_UNORDERED_DETAIL_FOA_ELEMENT_TYPE_HPP
#define BOOST_UNORDERED_DETAIL_FOA_ELEMENT_TYPE_HPP
#include <boost/core/pointer_traits.hpp>
namespace boost{
namespace unordered{
namespace detail{
namespace foa{
template<class T>
template<class T,class VoidPtr>
struct element_type
{
using value_type=T;
value_type* p;
using pointer=typename boost::pointer_traits<VoidPtr>::template rebind<T>;
pointer p;
/*
* we use a deleted copy constructor here so the type is no longer
* trivially copy-constructible which inhibits our memcpy
* optimizations when copying the tables
*/
element_type() = default;
element_type(value_type* p_):p(p_){}
element_type(element_type const&) = delete;
element_type(element_type&& rhs) noexcept
element_type()=default;
element_type(pointer p_):p(p_){}
element_type(element_type const&)=delete;
element_type(element_type&& rhs)noexcept
{
p = rhs.p;
rhs.p = nullptr;

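element_type now derives its stored pointer from the allocator's void_pointer instead of hard-coding value_type*. With the default allocator nothing changes, which the following sketch checks; element_pointer_sketch is an illustrative stand-in, assuming Boost.Core:

#include <boost/core/allocator_access.hpp>
#include <boost/core/pointer_traits.hpp>
#include <memory>
#include <type_traits>

template<class T, class Allocator>
struct element_pointer_sketch
{
  using void_pointer = typename boost::allocator_void_pointer<Allocator>::type;
  using pointer      = typename boost::pointer_traits<void_pointer>::template rebind<T>;

  pointer p; // T* for std::allocator, an offset/fancy pointer for e.g. shared-memory allocators
};

static_assert(
  std::is_same<element_pointer_sketch<int, std::allocator<int>>::pointer, int*>::value,
  "std::allocator's void_pointer is void*, so rebind<int> yields int*");
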
View File

@ -13,7 +13,7 @@ namespace boost {
namespace unordered {
namespace detail {
namespace foa {
template <class Key, class T> struct node_map_types
template <class Key, class T, class VoidPtr> struct node_map_types
{
using key_type = Key;
using mapped_type = T;
@ -24,7 +24,7 @@ namespace boost {
using value_type = std::pair<Key const, T>;
using moved_type = std::pair<raw_key_type&&, raw_mapped_type&&>;
using element_type = foa::element_type<value_type>;
using element_type = foa::element_type<value_type, VoidPtr>;
static value_type& value_from(element_type const& x)
{
@ -83,18 +83,15 @@ namespace boost {
template <class A, class... Args>
static void construct(A& al, element_type* p, Args&&... args)
{
p->p = boost::to_address(boost::allocator_allocate(al, 1));
p->p = boost::allocator_allocate(al, 1);
BOOST_TRY
{
boost::allocator_construct(al, p->p, std::forward<Args>(args)...);
boost::allocator_construct(
al, boost::to_address(p->p), std::forward<Args>(args)...);
}
BOOST_CATCH(...)
{
using pointer_type = typename boost::allocator_pointer<A>::type;
using pointer_traits = boost::pointer_traits<pointer_type>;
boost::allocator_deallocate(
al, pointer_traits::pointer_to(*(p->p)), 1);
boost::allocator_deallocate(al, p->p, 1);
BOOST_RETHROW
}
BOOST_CATCH_END
@ -114,12 +111,8 @@ namespace boost {
static void destroy(A& al, element_type* p) noexcept
{
if (p->p) {
using pointer_type = typename boost::allocator_pointer<A>::type;
using pointer_traits = boost::pointer_traits<pointer_type>;
destroy(al, p->p);
boost::allocator_deallocate(
al, pointer_traits::pointer_to(*(p->p)), 1);
destroy(al, boost::to_address(p->p));
boost::allocator_deallocate(al, p->p, 1);
}
}
};

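The node-type policies above settle into one allocate/construct/deallocate shape: keep the pointer exactly as allocator_allocate returned it, construct and destroy through boost::to_address, and hand the stored pointer straight back to allocator_deallocate, with no pointer_to round trip. A minimal sketch of that pattern, assuming Boost.Core; make_node/free_node are illustrative helpers, and plain try/catch stands in for the BOOST_TRY macros used in the real code:

#include <boost/core/allocator_access.hpp>
#include <boost/core/pointer_traits.hpp> // boost::to_address
#include <utility>

template<class A, class... Args>
typename boost::allocator_pointer<A>::type make_node(A& al, Args&&... args)
{
  auto p = boost::allocator_allocate(al, 1);   // keep the (possibly fancy) pointer
  try {
    // construct through the raw address; the stored pointer stays fancy
    boost::allocator_construct(al, boost::to_address(p), std::forward<Args>(args)...);
  }
  catch (...) {
    boost::allocator_deallocate(al, p, 1);     // deallocate with the pointer we kept
    throw;
  }
  return p;
}

template<class A>
void free_node(A& al, typename boost::allocator_pointer<A>::type p) noexcept
{
  boost::allocator_destroy(al, boost::to_address(p));
  boost::allocator_deallocate(al, p, 1);
}
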
View File

@ -14,7 +14,7 @@ namespace boost {
namespace detail {
namespace foa {
template <class Key> struct node_set_types
template <class Key, class VoidPtr> struct node_set_types
{
using key_type = Key;
using init_type = Key;
@ -22,7 +22,7 @@ namespace boost {
static Key const& extract(value_type const& key) { return key; }
using element_type = foa::element_type<value_type>;
using element_type = foa::element_type<value_type, VoidPtr>;
static value_type& value_from(element_type const& x) { return *x.p; }
static Key const& extract(element_type const& k) { return *k.p; }
@ -53,17 +53,15 @@ namespace boost {
template <class A, class... Args>
static void construct(A& al, element_type* p, Args&&... args)
{
p->p = boost::to_address(boost::allocator_allocate(al, 1));
p->p = boost::allocator_allocate(al, 1);
BOOST_TRY
{
boost::allocator_construct(al, p->p, std::forward<Args>(args)...);
boost::allocator_construct(
al, boost::to_address(p->p), std::forward<Args>(args)...);
}
BOOST_CATCH(...)
{
boost::allocator_deallocate(al,
boost::pointer_traits<typename boost::allocator_pointer<
A>::type>::pointer_to(*p->p),
1);
boost::allocator_deallocate(al, p->p, 1);
BOOST_RETHROW
}
BOOST_CATCH_END
@ -78,11 +76,8 @@ namespace boost {
static void destroy(A& al, element_type* p) noexcept
{
if (p->p) {
destroy(al, p->p);
boost::allocator_deallocate(al,
boost::pointer_traits<typename boost::allocator_pointer<
A>::type>::pointer_to(*(p->p)),
1);
destroy(al, boost::to_address(p->p));
boost::allocator_deallocate(al, p->p, 1);
}
}
};

View File

@ -363,9 +363,9 @@ public:
iterator begin()noexcept
{
iterator it{this->arrays.groups,0,this->arrays.elements};
if(this->arrays.elements&&
!(this->arrays.groups[0].match_occupied()&0x1))++it;
iterator it{this->arrays.groups(),0,this->arrays.elements()};
if(this->arrays.elements_&&
!(this->arrays.groups()[0].match_occupied()&0x1))++it;
return it;
}
@ -533,8 +533,9 @@ private:
std::move(x.h()),std::move(x.pred()),std::move(x.al()),
arrays_type{
x.arrays.groups_size_index,x.arrays.groups_size_mask,
reinterpret_cast<group_type*>(x.arrays.groups),
reinterpret_cast<value_type*>(x.arrays.elements)},
boost::pointer_traits<typename arrays_type::group_type_pointer>::pointer_to(
*reinterpret_cast<group_type*>(boost::to_address(x.arrays.groups_))),
x.arrays.elements_},
size_ctrl_type{
x.size_ctrl.ml,x.size_ctrl.size}}
{

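The move-construction hunks in this file (and the matching one in the first file) adopt another table's arrays whose group type differs only in layout-compatible ways. With fancy pointers the reinterpret_cast can no longer be applied to the stored member directly, so the code drops to a raw pointer, reinterprets, and converts back. A generic sketch of that round trip, assuming Boost.Core and a non-null source pointer; reinterpret_pointer is an illustrative name:

#include <boost/core/pointer_traits.hpp>

template<class ToPtr, class FromPtr>
ToPtr reinterpret_pointer(FromPtr p) // p must be non-null and point to compatible storage
{
  using to_traits = boost::pointer_traits<ToPtr>;
  using to_type   = typename to_traits::element_type;

  // fancy -> raw, reinterpret at the raw level, then raw -> fancy of the target family
  return to_traits::pointer_to(*reinterpret_cast<to_type*>(boost::to_address(p)));
}
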
View File

@ -75,7 +75,8 @@ namespace boost {
template <class Key, class T, class Hash, class KeyEqual, class Allocator>
class unordered_node_map
{
using map_types = detail::foa::node_map_types<Key, T>;
using map_types = detail::foa::node_map_types<Key, T,
typename boost::allocator_void_pointer<Allocator>::type>;
using table_type = detail::foa::table<map_types, Hash, KeyEqual,
typename boost::allocator_rebind<Allocator,

View File

@ -66,7 +66,8 @@ namespace boost {
template <class Key, class Hash, class KeyEqual, class Allocator>
class unordered_node_set
{
using set_types = detail::foa::node_set_types<Key>;
using set_types = detail::foa::node_set_types<Key,
typename boost::allocator_void_pointer<Allocator>::type>;
using table_type = detail::foa::table<set_types, Hash, KeyEqual,
typename boost::allocator_rebind<Allocator,
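
Taken together, the eight files thread the allocator's pointer and void_pointer types all the way down, which is what ultimately lets these containers be used with fancy-pointer allocators such as the Boost.Interprocess ones. A hedged usage sketch of that end goal (whether a given Boost release accepts it depends on the whole fancy-pointer series landing, not on this commit alone; the shared-memory name and size are arbitrary):

#include <boost/interprocess/managed_shared_memory.hpp>
#include <boost/interprocess/shared_memory_object.hpp>
#include <boost/interprocess/allocators/allocator.hpp>
#include <boost/unordered/unordered_node_map.hpp>
#include <functional>
#include <utility>

namespace bip = boost::interprocess;

using segment_manager = bip::managed_shared_memory::segment_manager;
using shm_alloc = bip::allocator<std::pair<const int, int>, segment_manager>;
using shm_map   = boost::unordered_node_map<int, int, std::hash<int>,
                                            std::equal_to<int>, shm_alloc>;

int main()
{
  bip::shared_memory_object::remove("fancy_ptr_demo");
  bip::managed_shared_memory segment(bip::create_only, "fancy_ptr_demo", 64 * 1024);

  // the map lives in shared memory, so every pointer it stores internally is an offset_ptr
  shm_map* m = segment.construct<shm_map>("map")(shm_alloc(segment.get_segment_manager()));
  m->emplace(1, 10);

  segment.destroy<shm_map>("map");
  bip::shared_memory_object::remove("fancy_ptr_demo");
}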