include/boost/lockfree/detail/freelist.hpp (42 changes: 25 additions & 17 deletions)
@@ -41,7 +41,7 @@ class alignas( cacheline_bytes ) freelist_stack : Alloc
{
struct freelist_node
{
- tagged_ptr< freelist_node > next;
+ atomic< tagged_ptr< freelist_node > > next;
};

typedef tagged_ptr< freelist_node > tagged_node_ptr;
@@ -134,7 +134,7 @@ class alignas( cacheline_bytes ) freelist_stack : Alloc
while ( current ) {
freelist_node* current_ptr = current.get_ptr();
if ( current_ptr )
- current = current_ptr->next;
+ current = current_ptr->next.load();
Alloc::deallocate( (T*)current_ptr, 1 );
}
}
@@ -195,7 +195,7 @@ class alignas( cacheline_bytes ) freelist_stack : Alloc
return 0;
}

- freelist_node* new_pool_ptr = old_pool->next.get_ptr();
+ freelist_node* new_pool_ptr = old_pool->next.load( memory_order_acquire ).get_ptr();
tagged_node_ptr new_pool( new_pool_ptr, old_pool.get_next_tag() );

if ( pool_.compare_exchange_weak( old_pool, new_pool ) ) {
@@ -219,7 +219,7 @@ class alignas( cacheline_bytes ) freelist_stack : Alloc
return 0;
}

- freelist_node* new_pool_ptr = old_pool->next.get_ptr();
+ freelist_node* new_pool_ptr = old_pool->next.load( memory_order_relaxed ).get_ptr();
tagged_node_ptr new_pool( new_pool_ptr, old_pool.get_next_tag() );

pool_.store( new_pool, memory_order_relaxed );
@@ -246,7 +246,7 @@ class alignas( cacheline_bytes ) freelist_stack : Alloc

for ( ;; ) {
tagged_node_ptr new_pool( new_pool_ptr, old_pool.get_tag() );
- new_pool->next.set_ptr( old_pool.get_ptr() );
+ new_pool->next.store( old_pool, memory_order_release );

if ( pool_.compare_exchange_weak( old_pool, new_pool ) )
return;
@@ -260,7 +260,7 @@ class alignas( cacheline_bytes ) freelist_stack : Alloc
freelist_node* new_pool_ptr = reinterpret_cast< freelist_node* >( node );

tagged_node_ptr new_pool( new_pool_ptr, old_pool.get_tag() );
- new_pool->next.set_ptr( old_pool.get_ptr() );
+ new_pool->next.store( old_pool, memory_order_relaxed );

pool_.store( new_pool, memory_order_relaxed );
}
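
For orientation, the freelist_stack hunks above all follow the same shape: the per-node next link becomes an atomic, the pop path reads it with an acquire load, the push path writes it with a release store before the CAS that publishes the node, and the single-threaded "unsafe" paths settle for relaxed. The sketch below shows that pattern on a deliberately simplified, pointer-based free list. It is not the Boost implementation; the names (free_node, freelist_sketch) are invented, and the tagged-pointer machinery that the real code layers on top is omitted here.

#include <atomic>

struct free_node
{
    std::atomic< free_node* > next { nullptr };
};

class freelist_sketch
{
    std::atomic< free_node* > head_ { nullptr };

public:
    void push( free_node* n )
    {
        free_node* old_head = head_.load( std::memory_order_relaxed );
        for ( ;; ) {
            // Link the node before publishing it; this release store pairs
            // with the acquire load in pop().
            n->next.store( old_head, std::memory_order_release );
            if ( head_.compare_exchange_weak( old_head, n ) )
                return;
        }
    }

    free_node* pop()
    {
        free_node* old_head = head_.load( std::memory_order_acquire );
        for ( ;; ) {
            if ( !old_head )
                return nullptr;
            // A concurrent push retry may be storing to old_head->next right
            // now, so this read must be an atomic load rather than a plain
            // member access.
            free_node* new_head = old_head->next.load( std::memory_order_acquire );
            if ( head_.compare_exchange_weak( old_head, new_head ) )
                return old_head;
        }
    }
};

Without a tag next to the pointer this sketch is still exposed to ABA, which is exactly what the tagged_ptr / get_next_tag() handling in the real code addresses.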
@@ -406,15 +406,17 @@ class fixed_size_freelist : NodeStorage
{
struct freelist_node
{
- tagged_index next;
+ atomic< tagged_index > next;
};

void initialize( void )
{
T* nodes = NodeStorage::nodes();
for ( std::size_t i = 0; i != NodeStorage::node_count(); ++i ) {
- tagged_index* next_index = reinterpret_cast< tagged_index* >( nodes + i );
- next_index->set_index( null_handle() );
+ atomic< tagged_index >* next_atomic = reinterpret_cast< atomic< tagged_index >* >( nodes + i );
+ tagged_index null_index;
+ null_index.set_index( null_handle() );
+ next_atomic->store( null_index, memory_order_relaxed );

#ifdef BOOST_LOCKFREE_FREELIST_INIT_RUNS_DTOR
destruct< false >( nodes + i );
@@ -568,10 +570,11 @@ class fixed_size_freelist : NodeStorage
if ( index == null_handle() )
return index;

- T* old_node = NodeStorage::nodes() + index;
- tagged_index* next_index = reinterpret_cast< tagged_index* >( old_node );
+ T* old_node = NodeStorage::nodes() + index;
+ atomic< tagged_index >* next_atomic = reinterpret_cast< atomic< tagged_index >* >( old_node );
+ tagged_index next_index = next_atomic->load( memory_order_acquire );

- tagged_index new_pool( next_index->get_index(), old_pool.get_next_tag() );
+ tagged_index new_pool( next_index.get_index(), old_pool.get_next_tag() );

if ( pool_.compare_exchange_weak( old_pool, new_pool ) )
return old_pool.get_index();
@@ -586,10 +589,11 @@ class fixed_size_freelist : NodeStorage
if ( index == null_handle() )
return index;

- T* old_node = NodeStorage::nodes() + index;
- tagged_index* next_index = reinterpret_cast< tagged_index* >( old_node );
+ T* old_node = NodeStorage::nodes() + index;
+ atomic< tagged_index >* next_atomic = reinterpret_cast< atomic< tagged_index >* >( old_node );
+ tagged_index next_index = next_atomic->load( memory_order_relaxed );

- tagged_index new_pool( next_index->get_index(), old_pool.get_next_tag() );
+ tagged_index new_pool( next_index.get_index(), old_pool.get_next_tag() );

pool_.store( new_pool, memory_order_relaxed );
return old_pool.get_index();
@@ -611,7 +615,9 @@ class fixed_size_freelist : NodeStorage

for ( ;; ) {
tagged_index new_pool( index, old_pool.get_tag() );
- new_pool_node->next.set_index( old_pool.get_index() );
+ tagged_index next_value;
+ next_value.set_index( old_pool.get_index() );
+ new_pool_node->next.store( next_value, memory_order_release );

if ( pool_.compare_exchange_weak( old_pool, new_pool ) )
return;
@@ -624,7 +630,9 @@ class fixed_size_freelist : NodeStorage
tagged_index old_pool = pool_.load( memory_order_consume );

tagged_index new_pool( index, old_pool.get_tag() );
- new_pool_node->next.set_index( old_pool.get_index() );
+ tagged_index next_value;
+ next_value.set_index( old_pool.get_index() );
+ new_pool_node->next.store( next_value, memory_order_relaxed );

pool_.store( new_pool );
}
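
The fixed_size_freelist hunks use the same load/store discipline, but the pool head is a tagged_index: an element index packed together with a tag that is bumped on every successful pop. The sketch below is a rough illustration of why that tag matters; it is not Boost's tagged_index, the names are invented, and a plain array of atomic links stands in for the real node storage.

#include <atomic>
#include <cstdint>

// Just enough state to show the idea: an index plus an ABA tag, kept
// trivially copyable so it can live inside a std::atomic.
struct tagged_index_sketch
{
    std::uint16_t index;
    std::uint16_t tag;

    std::uint16_t get_index() const { return index; }
    std::uint16_t get_next_tag() const { return static_cast< std::uint16_t >( tag + 1u ); }
};

// Pop one slot off the free list. next_links[ i ] holds the successor of
// slot i; null_index marks the end of the chain.
inline std::uint16_t pop_index( std::atomic< tagged_index_sketch >& pool,
                                std::atomic< std::uint16_t >* next_links,
                                std::uint16_t null_index )
{
    tagged_index_sketch old_pool = pool.load( std::memory_order_acquire );
    for ( ;; ) {
        std::uint16_t index = old_pool.get_index();
        if ( index == null_index )
            return null_index;

        // Acquire pairs with the release store a pushing thread performs on
        // the slot's link before republishing the slot.
        std::uint16_t next = next_links[ index ].load( std::memory_order_acquire );
        tagged_index_sketch new_pool { next, old_pool.get_next_tag() };

        // If the head was popped and pushed back in the meantime, its index
        // may match but its tag will have changed, so the CAS fails and the
        // loop retries with fresh values instead of corrupting the list.
        if ( pool.compare_exchange_weak( old_pool, new_pool ) )
            return index;
    }
}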
include/boost/lockfree/stack.hpp (39 changes: 22 additions & 17 deletions)
@@ -86,16 +86,20 @@ class stack
{
node( const T& val ) :
v( val )
- {}
+ {
+ next.store( handle_t(), detail::memory_order_relaxed );
+ }

node( T&& val ) :
v( std::forward< T >( val ) )
- {}
+ {
+ next.store( handle_t(), detail::memory_order_relaxed );
+ }

typedef typename detail::select_tagged_handle< node, node_based >::handle_type handle_t;

- handle_t next;
- T v;
+ detail::atomic< handle_t > next;
+ T v;
};

typedef typename detail::extract_allocator< bound_args, node >::type node_allocator;
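
One detail worth noting in the node hunk above: with next now a detail::atomic< handle_t >, the constructors initialize the link explicitly with a relaxed store. Relaxed is sufficient there because a freshly constructed node is not yet visible to any other thread; ordering only matters once the node is published onto the stack. A minimal illustration of the same pattern, with invented names, not the Boost code:

#include <atomic>

struct node_sketch
{
    std::atomic< node_sketch* > next;
    int v;

    explicit node_sketch( int val ) :
        v( val )
    {
        // No other thread can see this node yet, so a relaxed store is
        // all that is needed to give the link a defined value.
        next.store( nullptr, std::memory_order_relaxed );
    }
};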
@@ -264,7 +268,7 @@ class stack
tagged_node_handle old_tos = tos.load( detail::memory_order_relaxed );
for ( ;; ) {
tagged_node_handle new_tos( pool.get_handle( new_top_node ), old_tos.get_tag() );
- end_node->next = pool.get_handle( old_tos );
+ end_node->next.store( pool.get_handle( old_tos ), detail::memory_order_release );

if ( tos.compare_exchange_weak( old_tos, new_tos ) )
break;
@@ -276,7 +280,7 @@ class stack
tagged_node_handle old_tos = tos.load( detail::memory_order_relaxed );

tagged_node_handle new_tos( pool.get_handle( new_top_node ), old_tos.get_tag() );
- end_node->next = pool.get_handle( old_tos );
+ end_node->next.store( pool.get_handle( old_tos ), detail::memory_order_relaxed );

tos.store( new_tos, memory_order_relaxed );
}
@@ -292,7 +296,7 @@ class stack
}

node* new_top_node = end_node;
- end_node->next = NULL;
+ end_node->next.store( pool.null_handle(), detail::memory_order_relaxed );

BOOST_TRY
{
Expand All @@ -301,14 +305,14 @@ class stack
node* newnode = pool.template construct< Threadsafe, Bounded >( *it );
if ( newnode == NULL )
break;
- newnode->next = new_top_node;
- new_top_node = newnode;
+ newnode->next.store( pool.get_handle( new_top_node ), detail::memory_order_relaxed );
+ new_top_node = newnode;
}
}
BOOST_CATCH( ... )
{
for ( node* current_node = new_top_node; current_node != NULL; ) {
- node* next = current_node->next;
+ node* next = pool.get_pointer( current_node->next.load( detail::memory_order_relaxed ) );
pool.template destruct< Threadsafe >( current_node );
current_node = next;
}
@@ -622,7 +626,7 @@ class stack
if ( !pool.get_pointer( old_tos ) )
return false;

- node* new_tos_ptr = pool.get_pointer( old_tos_pointer->next );
+ node* new_tos_ptr = pool.get_pointer( old_tos_pointer->next.load( detail::memory_order_relaxed ) );
tagged_node_handle new_tos( pool.get_handle( new_tos_ptr ), old_tos.get_next_tag() );

tos.store( new_tos, memory_order_relaxed );
@@ -649,7 +653,8 @@ class stack
if ( !old_tos_pointer )
return false;

- tagged_node_handle new_tos( old_tos_pointer->next, old_tos.get_next_tag() );
+ tagged_node_handle new_tos( old_tos_pointer->next.load( detail::memory_order_acquire ), old_tos.get_next_tag() );

if ( tos.compare_exchange_weak( old_tos, new_tos ) ) {
f( std::move( old_tos_pointer->v ) );
@@ -709,7 +714,7 @@ class stack
f( std::move( node_pointer->v ) );
element_count += 1;

- node* next_node = pool.get_pointer( node_pointer->next );
+ node* next_node = pool.get_pointer( node_pointer->next.load( detail::memory_order_acquire ) );

if ( !next_node ) {
pool.template destruct< true >( nodes_to_consume );
@@ -755,10 +760,10 @@ class stack
tagged_node_handle nodes_in_reversed_order;
for ( ;; ) {
node* node_pointer = pool.get_pointer( nodes_to_consume );
- node* next_node = pool.get_pointer( node_pointer->next );
+ node* next_node = pool.get_pointer( node_pointer->next.load( detail::memory_order_acquire ) );

- node_pointer->next = pool.get_handle( last_node_pointer );
- last_node_pointer = node_pointer;
+ node_pointer->next.store( pool.get_handle( last_node_pointer ), detail::memory_order_relaxed );
+ last_node_pointer = node_pointer;

if ( !next_node ) {
nodes_in_reversed_order = nodes_to_consume;
@@ -774,7 +779,7 @@ class stack
f( std::move( node_pointer->v ) );
element_count += 1;

- node* next_node = pool.get_pointer( node_pointer->next );
+ node* next_node = pool.get_pointer( node_pointer->next.load( detail::memory_order_relaxed ) );

if ( !next_node ) {
pool.template destruct< true >( nodes_in_reversed_order );
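
The consume_all-style hunks show the other half of the ordering story: while nodes still hang off the shared top, next is read with acquire so it pairs with the producers' release stores, but once a consumer has detached an entire chain from tos it is the only thread walking those nodes and relaxed loads suffice. The condensed sketch below shows that detach-then-walk idea; it is illustrative only, not the Boost code, and the names (list_node, consume_all_sketch) are invented.

#include <atomic>
#include <cstddef>

struct list_node
{
    std::atomic< list_node* > next { nullptr };
    int value { 0 };
};

template < typename F >
std::size_t consume_all_sketch( std::atomic< list_node* >& top, F&& f )
{
    // The acquire exchange pairs with the release operations producers use
    // when linking nodes in, making the nodes' contents visible here.
    list_node* chain = top.exchange( nullptr, std::memory_order_acquire );

    std::size_t count = 0;
    while ( chain ) {
        f( chain->value );
        // The detached chain is private to this thread, so relaxed is enough.
        list_node* next = chain->next.load( std::memory_order_relaxed );
        // The real code hands the node back to the freelist here; the sketch
        // simply moves on.
        chain = next;
        ++count;
    }
    return count;
}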