Added branch prediction hints (cds_likely()/cds_unlikely()) to Michael's list
author     khizmax <khizmax@gmail.com>
Tue, 5 Jul 2016 15:52:54 +0000 (18:52 +0300)
committer  khizmax <khizmax@gmail.com>
Tue, 5 Jul 2016 15:52:54 +0000 (18:52 +0300)
cds/intrusive/impl/michael_list.h
cds/intrusive/michael_list_nogc.h
cds/intrusive/michael_list_rcu.h
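
The hints added by this commit, cds_likely() and cds_unlikely(), are branch-prediction macros supplied by the libcds compiler abstraction headers. As a rough sketch of the usual shape of such macros (an illustration, not the verbatim libcds definition), they map to GCC/Clang's __builtin_expect and degrade to a no-op on compilers without that intrinsic:

    // Sketch of typical branch-prediction hint macros; the real cds_likely()/
    // cds_unlikely() definitions live in libcds' compiler-specific headers.
    #if defined( __GNUC__ ) || defined( __clang__ )
    #   define cds_likely( expr )    __builtin_expect( !!( expr ), 1 )
    #   define cds_unlikely( expr )  __builtin_expect( !!( expr ), 0 )
    #else
    #   define cds_likely( expr )    ( expr )
    #   define cds_unlikely( expr )  ( expr )
    #endif

The double negation normalizes the condition to 0/1, since __builtin_expect compares the expression against the expected long value.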

cds/intrusive/impl/michael_list.h
index 079996d4af20b0677756d7d25a9fb2637a62f2c5..7f6ab5b0e1821254af29ca937e27bb248ddcab18 100644
@@ -274,7 +274,7 @@ namespace cds { namespace intrusive {
 
             marked_node_ptr cur(pos.pCur);
             pNode->m_pNext.store( cur, memory_model::memory_order_release );
-            if ( pos.pPrev->compare_exchange_strong( cur, marked_node_ptr(pNode), memory_model::memory_order_release, atomics::memory_order_relaxed ))
+            if ( cds_likely( pos.pPrev->compare_exchange_strong( cur, marked_node_ptr(pNode), memory_model::memory_order_release, atomics::memory_order_relaxed )))
                 return true;
 
             pNode->m_pNext.store( marked_node_ptr(), memory_model::memory_order_relaxed );
@@ -288,11 +288,11 @@ namespace cds { namespace intrusive {
 
             // Mark the node (logical deleting)
             marked_node_ptr next(pos.pNext, 0);
-            if ( pos.pCur->m_pNext.compare_exchange_strong( next, marked_node_ptr(pos.pNext, 1), memory_model::memory_order_release, atomics::memory_order_relaxed )) {
+            if ( cds_likely( pos.pCur->m_pNext.compare_exchange_strong( next, marked_node_ptr(pos.pNext, 1), memory_model::memory_order_release, atomics::memory_order_relaxed ))) {
                 // physical deletion may be performed by search function if it detects that a node is logically deleted (marked)
                 // CAS may be successful here or in other thread that searching something
                 marked_node_ptr cur(pos.pCur);
-                if ( pos.pPrev->compare_exchange_strong( cur, marked_node_ptr( pos.pNext ), memory_model::memory_order_acquire, atomics::memory_order_relaxed ))
+                if ( cds_likely( pos.pPrev->compare_exchange_strong( cur, marked_node_ptr( pos.pNext ), memory_model::memory_order_acquire, atomics::memory_order_relaxed )))
                     retire_node( pos.pCur );
                 return true;
             }
@@ -321,7 +321,7 @@ namespace cds { namespace intrusive {
                     do {
                         pNext = pCur->m_pNext.load(memory_model::memory_order_relaxed);
                         g.assign( node_traits::to_value_ptr( pNext.ptr()));
-                    } while ( pNext != pCur->m_pNext.load(memory_model::memory_order_acquire));
+                    } while ( cds_unlikely( pNext != pCur->m_pNext.load(memory_model::memory_order_acquire)));
 
                     if ( pNext.ptr())
                         m_pNode = m_Guard.assign( g.template get<value_type>());
@@ -343,7 +343,7 @@ namespace cds { namespace intrusive {
                         m_pNode = nullptr;
                         m_Guard.clear();
                     }
-                    if ( p == pNode.load(memory_model::memory_order_acquire))
+                    if ( cds_likely( p == pNode.load(memory_model::memory_order_acquire)))
                         break;
                 }
             }
@@ -904,7 +904,7 @@ namespace cds { namespace intrusive {
                 head = m_pHead.load(memory_model::memory_order_relaxed);
                 if ( head.ptr())
                     guard.assign( node_traits::to_value_ptr( *head.ptr()));
-                if ( m_pHead.load(memory_model::memory_order_acquire) == head ) {
+                if ( cds_likely( m_pHead.load(memory_model::memory_order_acquire) == head )) {
                     if ( head.ptr() == nullptr )
                         break;
                     value_type& val = *node_traits::to_value_ptr( *head.ptr());
@@ -1001,7 +1001,7 @@ namespace cds { namespace intrusive {
             node_type * pNode = node_traits::to_node_ptr( val );
             while ( true ) {
                 if ( search( refHead, val, pos, key_comparator())) {
-                    if ( pos.pCur->m_pNext.load(memory_model::memory_order_acquire).bits()) {
+                    if ( cds_unlikely( pos.pCur->m_pNext.load(memory_model::memory_order_acquire).bits())) {
                         back_off()();
                         continue;       // the node found is marked as deleted
                     }
@@ -1160,7 +1160,7 @@ namespace cds { namespace intrusive {
                         {
                             return node_traits::to_value_ptr( p.ptr());
                         });
-                if ( pPrev->load(memory_model::memory_order_acquire).all() != pCur.ptr()) {
+                if ( cds_unlikely( pPrev->load(memory_model::memory_order_acquire).all() != pCur.ptr())) {
                     bkoff();
                     goto try_again;
                 }
@@ -1169,7 +1169,7 @@ namespace cds { namespace intrusive {
                 if ( pNext.bits() == 1 ) {
                     // pCur marked i.e. logically deleted. Help the erase/unlink function to unlink pCur node
                     marked_node_ptr cur( pCur.ptr());
-                    if ( pPrev->compare_exchange_strong( cur, marked_node_ptr( pNext.ptr()), memory_model::memory_order_acquire, atomics::memory_order_relaxed )) {
+                    if ( cds_unlikely( pPrev->compare_exchange_strong( cur, marked_node_ptr( pNext.ptr()), memory_model::memory_order_acquire, atomics::memory_order_relaxed ))) {
                         retire_node( pCur.ptr());
                     }
                     else {
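
The pattern in michael_list.h above is uniform: the CAS that links or unlinks a node on the uncontended fast path is hinted as likely to succeed, while re-validation mismatches and encounters with marked (logically deleted) nodes, which force a back-off and retry, are hinted as unlikely. A standalone sketch of that shape, reusing the macro sketch above (the push function and the next field are illustrative, not libcds API):

    #include <atomic>

    // Illustration only: the uncontended CAS is expected to succeed, so the
    // retry branch is marked unlikely and the success path becomes the
    // fall-through that the compiler lays out first.
    template <typename T>
    void atomic_push( std::atomic<T*>& head, T* pNode )
    {
        T* cur = head.load( std::memory_order_relaxed );
        for (;;) {
            pNode->next = cur;   // assumes T exposes a public T* next field
            if ( cds_likely( head.compare_exchange_weak( cur, pNode,
                     std::memory_order_release, std::memory_order_relaxed )))
                return;          // common case: no contention, CAS succeeded
            // rare case: another thread changed head; cur now holds the new value
        }
    }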
cds/intrusive/michael_list_nogc.h
index 06ac410dc43773c70ad554508666be8eae4d80e3..5f75a6f1091035b4dc0185527f81f4ded546c3d4 100644
@@ -154,7 +154,7 @@ namespace cds { namespace intrusive {
             link_checker::is_empty( pNode );
 
             pNode->m_pNext.store( pos.pCur, memory_model::memory_order_relaxed );
-            if ( pos.pPrev->compare_exchange_strong( pos.pCur, pNode, memory_model::memory_order_release, atomics::memory_order_relaxed ))
+            if ( cds_likely( pos.pPrev->compare_exchange_strong( pos.pCur, pNode, memory_model::memory_order_release, atomics::memory_order_relaxed )))
                 return true;
 
             pNode->m_pNext.store( nullptr, memory_model::memory_order_relaxed );
@@ -475,7 +475,7 @@ namespace cds { namespace intrusive {
         void clear( Disposer disp )
         {
             node_type * pHead = m_pHead.load(memory_model::memory_order_relaxed);
-            do {} while ( !m_pHead.compare_exchange_weak( pHead, nullptr, memory_model::memory_order_relaxed ) );
+            do {} while ( cds_unlikely( !m_pHead.compare_exchange_weak( pHead, nullptr, memory_model::memory_order_relaxed )));
 
             while ( pHead ) {
                 node_type * p = pHead->m_pNext.load(memory_model::memory_order_relaxed);
@@ -648,12 +648,12 @@ namespace cds { namespace intrusive {
                 }
 
                 pNext = pCur->m_pNext.load(memory_model::memory_order_relaxed);
-                if ( pCur->m_pNext.load(memory_model::memory_order_acquire) != pNext ) {
+                if ( cds_unlikely( pCur->m_pNext.load(memory_model::memory_order_acquire) != pNext )) {
                     bkoff();
                     goto try_again;
                 }
 
-                if ( pPrev->load(memory_model::memory_order_acquire) != pCur ) {
+                if ( cds_unlikely( pPrev->load(memory_model::memory_order_acquire) != pCur )) {
                     bkoff();
                     goto try_again;
                 }
cds/intrusive/michael_list_rcu.h
index c1635bd740e690031c8ce908fdf54050e0ded787..59f8563f2f22d09f089df49ea998a3e3df75996c 100644
@@ -247,7 +247,7 @@ namespace cds { namespace intrusive {
 
             marked_node_ptr p( pos.pCur );
             pNode->m_pNext.store( p, memory_model::memory_order_release );
-            if ( pos.pPrev->compare_exchange_strong( p, marked_node_ptr(pNode), memory_model::memory_order_release, atomics::memory_order_relaxed ))
+            if ( cds_likely( pos.pPrev->compare_exchange_strong( p, marked_node_ptr(pNode), memory_model::memory_order_release, atomics::memory_order_relaxed )))
                 return true;
 
             pNode->m_pNext.store( marked_node_ptr(), memory_model::memory_order_relaxed );
@@ -269,11 +269,11 @@ namespace cds { namespace intrusive {
             // Mark the node (logical deletion)
             marked_node_ptr next(pos.pNext, 0);
 
-            if ( pos.pCur->m_pNext.compare_exchange_strong( next, next | nMask, memory_model::memory_order_release, atomics::memory_order_relaxed )) {
+            if ( cds_likely( pos.pCur->m_pNext.compare_exchange_strong( next, next | nMask, memory_model::memory_order_release, atomics::memory_order_relaxed ))) {
 
                 // Try physical removal - fast path
                 marked_node_ptr cur(pos.pCur);
-                if ( pos.pPrev->compare_exchange_strong(cur, marked_node_ptr(pos.pNext), memory_model::memory_order_acquire, atomics::memory_order_relaxed )) {
+                if ( cds_likely( pos.pPrev->compare_exchange_strong(cur, marked_node_ptr(pos.pNext), memory_model::memory_order_acquire, atomics::memory_order_relaxed ))) {
                     if ( nMask == erase_mask )
                         link_to_remove_chain( pos, pos.pCur );
                 }
@@ -850,7 +850,7 @@ namespace cds { namespace intrusive {
             RCU \p synchronize method can be called.
             Note that depending on RCU type used the \ref disposer invocation can be deferred.
 
-            The function can throw \p cds::urcu::rcu_deadlock exception if an deadlock is encountered and
+            The function can throw \p cds::urcu::rcu_deadlock exception if a deadlock is encountered and
             deadlock checking policy is \p opt::v::rcu_throw_deadlock.
         */
         void clear()
@@ -866,9 +866,9 @@ namespace cds { namespace intrusive {
                         if ( !pHead.ptr() )
                             break;
                         marked_node_ptr pNext( pHead->m_pNext.load(memory_model::memory_order_relaxed) );
-                        if ( !pHead->m_pNext.compare_exchange_weak( pNext, pNext | 1, memory_model::memory_order_acquire, memory_model::memory_order_relaxed ))
+                        if ( cds_unlikely( !pHead->m_pNext.compare_exchange_weak( pNext, pNext | 1, memory_model::memory_order_acquire, memory_model::memory_order_relaxed )))
                             continue;
-                        if ( !m_pHead.compare_exchange_weak( pHead, marked_node_ptr(pNext.ptr()), memory_model::memory_order_release, memory_model::memory_order_relaxed ))
+                        if ( cds_unlikely( !m_pHead.compare_exchange_weak( pHead, marked_node_ptr(pNext.ptr()), memory_model::memory_order_release, memory_model::memory_order_relaxed )))
                             continue;
                     }
 
@@ -1138,8 +1138,8 @@ namespace cds { namespace intrusive {
 
                 pNext = pCur->m_pNext.load(memory_model::memory_order_acquire);
 
-                if ( pPrev->load(memory_model::memory_order_acquire) != pCur
-                    || pNext != pCur->m_pNext.load(memory_model::memory_order_acquire))
+                if ( cds_unlikely( pPrev->load(memory_model::memory_order_acquire) != pCur
+                    || pNext != pCur->m_pNext.load(memory_model::memory_order_acquire )))
                 {
                     bkoff();
                     goto try_again;
@@ -1147,7 +1147,7 @@ namespace cds { namespace intrusive {
 
                 if ( pNext.bits() ) {
                     // pCur is marked as deleted. Try to unlink it from the list
-                    if ( pPrev->compare_exchange_weak( pCur, marked_node_ptr( pNext.ptr() ), memory_model::memory_order_acquire, atomics::memory_order_relaxed ) ) {
+                    if ( cds_likely( pPrev->compare_exchange_weak( pCur, marked_node_ptr( pNext.ptr() ), memory_model::memory_order_acquire, atomics::memory_order_relaxed ))) {
                         if ( pNext.bits() == erase_mask )
                             link_to_remove_chain( pos, pCur.ptr() );
                     }