From a41fa2685d2e4d14f48fb4000478b5c2db07910e Mon Sep 17 00:00:00 2001
From: khizmax
Date: Mon, 4 Jul 2016 21:17:20 +0300
Subject: [PATCH] Added likely()/unlikely() to free_list

---
 cds/intrusive/free_list.h        | 11 ++++++-----
 cds/intrusive/free_list_tagged.h |  4 ++--
 2 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/cds/intrusive/free_list.h b/cds/intrusive/free_list.h
index de92e06e..870ea2a9 100644
--- a/cds/intrusive/free_list.h
+++ b/cds/intrusive/free_list.h
@@ -39,7 +39,7 @@ namespace cds { namespace intrusive {
     /** @ingroup cds_intrusive_helper
 
         Free list is a helper class intended for reusing objects instead of freeing them completely;
-        this avoids the overhead of \p malloc(), and also avoids its worst-case behaviour of taking an operating system lock.
+        this avoids the overhead of \p malloc(), and also avoids its worst-case behavior of taking an operating system lock.
         So, the free list can be considered as a specialized allocator for objects of some type.
 
         The algorithm is taken from this article.
@@ -137,8 +137,9 @@ namespace cds { namespace intrusive {
             while ( head != nullptr ) {
                 auto prevHead = head;
                 auto refs = head->m_freeListRefs.load( atomics::memory_order_relaxed );
-                if ( (refs & c_RefsMask) == 0 || !head->m_freeListRefs.compare_exchange_strong( refs, refs + 1,
-                    atomics::memory_order_acquire, atomics::memory_order_relaxed ))
+
+                if ( cds_unlikely( (refs & c_RefsMask) == 0 || !head->m_freeListRefs.compare_exchange_strong( refs, refs + 1,
+                    atomics::memory_order_acquire, atomics::memory_order_relaxed )))
                 {
                     head = m_Head.load( atomics::memory_order_acquire );
                     continue;
@@ -148,7 +149,7 @@ namespace cds { namespace intrusive {
                 // we can read the next and not worry about it changing between now and the time
                 // we do the CAS
                 node * next = head->m_freeListNext.load( atomics::memory_order_relaxed );
-                if ( m_Head.compare_exchange_strong( head, next, atomics::memory_order_acquire, atomics::memory_order_relaxed )) {
+                if ( cds_likely( m_Head.compare_exchange_strong( head, next, atomics::memory_order_acquire, atomics::memory_order_relaxed ))) {
                     // Yay, got the node. This means it was on the list, which means
                     // shouldBeOnFreeList must be false no matter the refcount (because
                     // nobody else knows it's been taken off yet, it can't have been put back on).
@@ -218,7 +219,7 @@ namespace cds { namespace intrusive {
             while ( true ) {
                 pNode->m_freeListNext.store( head, atomics::memory_order_relaxed );
                 pNode->m_freeListRefs.store( 1, atomics::memory_order_release );
-                if ( !m_Head.compare_exchange_strong( head, pNode, atomics::memory_order_release, atomics::memory_order_relaxed )) {
+                if ( cds_unlikely( !m_Head.compare_exchange_strong( head, pNode, atomics::memory_order_release, atomics::memory_order_relaxed ))) {
                     // Hmm, the add failed, but we can only try again when the refcount goes back to zero
                     if ( pNode->m_freeListRefs.fetch_add( c_ShouldBeOnFreeList - 1, atomics::memory_order_release ) == 1 )
                         continue;
diff --git a/cds/intrusive/free_list_tagged.h b/cds/intrusive/free_list_tagged.h
index d1fbbd5d..38eefe98 100644
--- a/cds/intrusive/free_list_tagged.h
+++ b/cds/intrusive/free_list_tagged.h
@@ -119,7 +119,7 @@ namespace cds { namespace intrusive {
             do {
                 newHead.tag = currentHead.tag + 1;
                 pNode->m_freeListNext.store( currentHead.ptr, atomics::memory_order_relaxed );
-            } while ( !m_Head.compare_exchange_weak( currentHead, newHead, atomics::memory_order_release, atomics::memory_order_relaxed ));
+            } while ( cds_unlikely( !m_Head.compare_exchange_weak( currentHead, newHead, atomics::memory_order_release, atomics::memory_order_relaxed )));
         }
 
         /// Gets a node from the free list. If the list is empty, returns \p nullptr
@@ -130,7 +130,7 @@ namespace cds { namespace intrusive {
             while ( currentHead.ptr != nullptr ) {
                 newHead.ptr = currentHead.ptr->m_freeListNext.load( atomics::memory_order_relaxed );
                 newHead.tag = currentHead.tag + 1;
-                if ( m_Head.compare_exchange_weak( currentHead, newHead, atomics::memory_order_release, atomics::memory_order_acquire ) )
+                if ( cds_likely( m_Head.compare_exchange_weak( currentHead, newHead, atomics::memory_order_release, atomics::memory_order_acquire )))
                     break;
             }
             return currentHead.ptr;
-- 
2.34.1
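Note (not part of the patch): cds_likely()/cds_unlikely() are libcds' branch-prediction hint macros. Their actual definitions live in the library's compiler-support headers and are not shown in this patch; the sketch below is a hypothetical stand-alone equivalent, assuming a GCC/Clang toolchain with __builtin_expect, to illustrate what the hints mean on the CAS loops this patch touches. The macro names demo_likely/demo_unlikely and the toy push() helper are illustrative only.

    // Hypothetical stand-ins for the cds_likely()/cds_unlikely() hints
    // (assumption: GCC/Clang; the real libcds macros may differ).
    #include <atomic>

    #if defined(__GNUC__) || defined(__clang__)
    #   define demo_likely( expr )   __builtin_expect( !!( expr ), 1 )
    #   define demo_unlikely( expr ) __builtin_expect( !!( expr ), 0 )
    #else
    #   define demo_likely( expr )   ( expr )
    #   define demo_unlikely( expr ) ( expr )
    #endif

    // Toy Treiber-style push, annotated the same way the patch annotates the
    // free-list CAS loops: the CAS is expected to succeed, so the retry
    // branch is marked unlikely.
    struct node {
        std::atomic<node*> next{ nullptr };
    };

    inline void push( std::atomic<node*>& head, node* pNode )
    {
        node* cur = head.load( std::memory_order_relaxed );
        do {
            pNode->next.store( cur, std::memory_order_relaxed );
        } while ( demo_unlikely( !head.compare_exchange_weak( cur, pNode,
                      std::memory_order_release, std::memory_order_relaxed )));
    }

With such a hint in place, compilers typically lay out the retry/contention path out of line and keep the expected success path straight, which is the apparent intent of annotating the contended-CAS branches in free_list.h and free_list_tagged.h.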