Fixed atomic initialization, ordering optimization
author: khizmax <libcds.dev@gmail.com>
Sat, 15 Apr 2017 09:16:55 +0000 (12:16 +0300)
committer: khizmax <libcds.dev@gmail.com>
Sat, 15 Apr 2017 09:16:55 +0000 (12:16 +0300)
cds/intrusive/basket_queue.h
cds/intrusive/optimistic_queue.h

index 890d33f50111591555056b2024ea617a24f2081d..ad0b9479d231067a146ac17b5aef87b563221aad 100644 (file)
@@ -66,8 +66,9 @@ namespace cds { namespace intrusive {
             atomic_marked_ptr m_pNext ; ///< pointer to the next node in the container
 
             node()
-                : m_pNext( nullptr )
-            {}
+            {
+                m_pNext.store( marked_ptr(), atomics::memory_order_release );
+            }
         };
 
         using cds::intrusive::single_link::default_hook;
@@ -663,12 +664,12 @@ namespace cds { namespace intrusive {
             while ( true ) {
                 t = guard.protect( m_pTail, []( marked_ptr p ) -> value_type * { return node_traits::to_value_ptr( p.ptr());});
 
-                marked_ptr pNext = t->m_pNext.load(memory_model::memory_order_acquire );
+                marked_ptr pNext = t->m_pNext.load(memory_model::memory_order_relaxed );
 
                 if ( pNext.ptr() == nullptr ) {
                     pNew->m_pNext.store( marked_ptr(), memory_model::memory_order_relaxed );
                     if ( t->m_pNext.compare_exchange_weak( pNext, marked_ptr(pNew), memory_model::memory_order_release, atomics::memory_order_relaxed )) {
-                        if ( !m_pTail.compare_exchange_strong( t, marked_ptr(pNew), memory_model::memory_order_release, atomics::memory_order_acquire ))
+                        if ( !m_pTail.compare_exchange_strong( t, marked_ptr(pNew), memory_model::memory_order_release, atomics::memory_order_relaxed ))
                             m_Stat.onAdvanceTailFailed();
                         break;
                     }
@@ -681,7 +682,7 @@ namespace cds { namespace intrusive {
                     pNext = gNext.protect( t->m_pNext, []( marked_ptr p ) -> value_type * { return node_traits::to_value_ptr( p.ptr());});
 
                     // add to the basket
-                    if ( m_pTail.load(memory_model::memory_order_acquire) == t
+                    if ( m_pTail.load( memory_model::memory_order_relaxed ) == t
                          && t->m_pNext.load( memory_model::memory_order_relaxed) == pNext
                          && !pNext.bits())
                     {
@@ -700,9 +701,7 @@ namespace cds { namespace intrusive {
                     typename gc::template GuardArray<2> g;
                     g.assign( 0, node_traits::to_value_ptr( pNext.ptr()));
                     if ( m_pTail.load( memory_model::memory_order_acquire ) != t
-
                       || t->m_pNext.load( memory_model::memory_order_relaxed ) != pNext )
-
                     {
                         m_Stat.onEnqueueRace();
                         bkoff();
@@ -711,14 +710,14 @@ namespace cds { namespace intrusive {
 
                     marked_ptr p;
                     bool bTailOk = true;
-                    while ( (p = pNext->m_pNext.load( memory_model::memory_order_relaxed )).ptr() != nullptr )
+                    while ( (p = pNext->m_pNext.load( memory_model::memory_order_acquire )).ptr() != nullptr )
                     {
-                        bTailOk = m_pTail.load( memory_model::memory_order_acquire ) == t;
+                        bTailOk = m_pTail.load( memory_model::memory_order_relaxed ) == t;
                         if ( !bTailOk )
                             break;
 
                         g.assign( 1, node_traits::to_value_ptr( p.ptr()));
-                        if ( pNext->m_pNext.load(memory_model::memory_order_acquire) != p )
+                        if ( pNext->m_pNext.load( memory_model::memory_order_relaxed ) != p )
                             continue;
                         pNext = p;
                         g.assign( 0, g.template get<value_type>( 1 ));
index 3e6118a1568e90ab518943016698df6c9b5c95f9..3d00eef39cf575854da8c63325a8e22983bb16dc 100644 (file)
@@ -61,9 +61,10 @@ namespace cds { namespace intrusive {
             atomic_node_ptr m_pPrev ;   ///< Pointer to previous node
 
             CDS_CONSTEXPR node() CDS_NOEXCEPT
-                : m_pNext( nullptr )
-                , m_pPrev( nullptr )
-            {}
+            {
+                m_pNext.store( nullptr, atomics::memory_order_relaxed );
+                m_pPrev.store( nullptr, atomics::memory_order_release );
+            }
         };
 
         //@cond
@@ -611,8 +612,8 @@ namespace cds { namespace intrusive {
             back_off bkoff;
 
             guards.assign( 1, &val );
-            node_type * pTail = guards.protect( 0, m_pTail, [](node_type * p) -> value_type * {return node_traits::to_value_ptr(p);} );   // Read the tail
             while( true ) {
+                node_type * pTail = guards.protect( 0, m_pTail, []( node_type * p ) -> value_type * { return node_traits::to_value_ptr( p ); } );   // Read the tail
                 pNew->m_pNext.store( pTail, memory_model::memory_order_relaxed );
                 if ( m_pTail.compare_exchange_strong( pTail, pNew, memory_model::memory_order_release, atomics::memory_order_acquire )) { // Try to CAS the tail
                     pTail->m_pPrev.store( pNew, memory_model::memory_order_release ); // Success, write prev
@@ -620,7 +621,6 @@ namespace cds { namespace intrusive {
                     m_Stat.onEnqueue();
                     break;     // Enqueue done!
                 }
-                guards.assign( 0, node_traits::to_value_ptr( pTail ));  // pTail has been changed by CAS above
                 m_Stat.onEnqueueRace();
                 bkoff();
             }