Replace CDS_ATOMIC with namespace atomics
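The point of the patch: the old CDS_ATOMIC macro, which named the namespace of whatever atomics implementation the library was configured with, is replaced by a plain namespace alias called atomics. A minimal sketch of the idea, assuming the alias simply forwards to std (the real definition lives in the libcds atomic support header and may select a different backend):

    #include <atomic>

    // Hypothetical stand-in for the libcds alias: one line replaces the old macro.
    namespace atomics = std;

    int main()
    {
        atomics::atomic<unsigned int> nHeight{ 1 };
        // The same types and memory_order constants are now reached through the
        // alias instead of CDS_ATOMIC::...
        unsigned int nCur = nHeight.load( atomics::memory_order_acquire );
        nHeight.compare_exchange_strong( nCur, nCur + 1,
                                         atomics::memory_order_release,
                                         atomics::memory_order_relaxed );
        return 0;
    }

The same commit drops the cds/details/std wrapper headers in favor of the standard <type_traits> and <memory>, and replaces the null_ptr<T*>() helper with plain nullptr.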
diff --git a/cds/intrusive/skip_list_impl.h b/cds/intrusive/skip_list_impl.h
index bda9e0fadcd275f90eb7e4b8572014a5e73866d9..b58ba19cc22b98988e8b1f89e1aa80783b73edc6 100644
--- a/cds/intrusive/skip_list_impl.h
+++ b/cds/intrusive/skip_list_impl.h
@@ -3,9 +3,9 @@
 #ifndef __CDS_INTRUSIVE_SKIP_LIST_IMPL_H
 #define __CDS_INTRUSIVE_SKIP_LIST_IMPL_H
 
+#include <type_traits>
+#include <memory>
 #include <cds/intrusive/skip_list_base.h>
-#include <cds/details/std/type_traits.h>
-#include <cds/details/std/memory.h>
 #include <cds/opt/compare.h>
 #include <cds/ref.h>
 #include <cds/details/binary_functor_wrapper.h>
@@ -48,7 +48,7 @@ namespace cds { namespace intrusive {
                 back_off bkoff;
 
                 for (;;) {
-                    if ( m_pNode->next( m_pNode->height() - 1 ).load( CDS_ATOMIC::memory_order_acquire ).bits() ) {
+                    if ( m_pNode->next( m_pNode->height() - 1 ).load( atomics::memory_order_acquire ).bits() ) {
                         // Current node is marked as deleted. So, its next pointer can point to anything
                         // In this case we interrupt our iteration and return the end() iterator.
                         *this = iterator();
@@ -62,7 +62,7 @@ namespace cds { namespace intrusive {
                         bkoff();
                         continue;
                     }
-                    else if ( pp && pp->next( pp->height() - 1 ).load( CDS_ATOMIC::memory_order_relaxed ).bits() ) {
+                    else if ( pp && pp->next( pp->height() - 1 ).load( atomics::memory_order_relaxed ).bits() ) {
                         // p is marked as deleted. Spin waiting for physical removal
                         bkoff();
                         continue;
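The .bits() tests above read a mark stored in the low bit of a successor pointer: a non-zero mark on the topmost-level link means the node is logically deleted and the iterator must not trust its forward pointers. A minimal sketch of that encoding, assuming the real marked-pointer class in cds/details differs in interface details:

    #include <cstdint>

    // Sketch only: the low bit of an aligned node pointer doubles as a deletion mark.
    template <typename T>
    class marked_ptr_sketch {
        std::uintptr_t m_raw;
    public:
        explicit marked_ptr_sketch( T * p = nullptr, unsigned nMark = 0 )
            : m_raw( reinterpret_cast<std::uintptr_t>( p ) | nMark )
        {}

        T *      ptr()  const { return reinterpret_cast<T *>( m_raw & ~std::uintptr_t( 1 )); }
        unsigned bits() const { return static_cast<unsigned>( m_raw & 1 ); } // non-zero => logically deleted
    };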
@@ -75,7 +75,7 @@ namespace cds { namespace intrusive {
 
         public: // for internal use only!!!
             iterator( node_type& refHead )
-                : m_pNode( null_ptr<node_type *>() )
+                : m_pNode( nullptr )
             {
                 back_off bkoff;
 
@@ -89,7 +89,7 @@ namespace cds { namespace intrusive {
 
                     node_type * pp = p.ptr();
                     // Logically deleted node is marked from highest level
-                    if ( !pp->next( pp->height() - 1 ).load( CDS_ATOMIC::memory_order_acquire ).bits() ) {
+                    if ( !pp->next( pp->height() - 1 ).load( atomics::memory_order_acquire ).bits() ) {
                         m_pNode = pp;
                         break;
                     }
@@ -100,7 +100,7 @@ namespace cds { namespace intrusive {
 
         public:
             iterator()
-                : m_pNode( null_ptr<node_type *>())
+                : m_pNode( nullptr )
             {}
 
             iterator( iterator const& s)
@@ -111,16 +111,16 @@ namespace cds { namespace intrusive {
 
             value_type * operator ->() const
             {
-                assert( m_pNode != null_ptr< node_type *>() );
-                assert( node_traits::to_value_ptr( m_pNode ) != null_ptr<value_type *>() );
+                assert( m_pNode != nullptr );
+                assert( node_traits::to_value_ptr( m_pNode ) != nullptr );
 
                 return node_traits::to_value_ptr( m_pNode );
             }
 
             value_ref operator *() const
             {
-                assert( m_pNode != null_ptr< node_type *>() );
-                assert( node_traits::to_value_ptr( m_pNode ) != null_ptr<value_type *>() );
+                assert( m_pNode != nullptr );
+                assert( node_traits::to_value_ptr( m_pNode ) != nullptr );
 
                 return *node_traits::to_value_ptr( m_pNode );
             }
@@ -260,7 +260,7 @@ namespace cds { namespace intrusive {
             bool operator !=(iterator const& i ) const;
         };
         \endcode
-        Note, the iterator object returned by \ref end, \p cend member functions points to \p NULL and should not be dereferenced.
+        Note that the iterator object returned by the \ref end and \p cend member functions points to \p nullptr and should not be dereferenced.
 
         <b>How to use</b>
 
@@ -481,7 +481,7 @@ namespace cds { namespace intrusive {
 
         item_counter                m_ItemCounter       ;   ///< item counter
         random_level_generator      m_RandomLevelGen    ;   ///< random level generator instance
-        CDS_ATOMIC::atomic<unsigned int>    m_nHeight   ;   ///< estimated high level
+        atomics::atomic<unsigned int>    m_nHeight   ;   ///< estimated high level
         mutable stat                m_Stat              ;   ///< internal statistics
 
     protected:
@@ -506,7 +506,7 @@ namespace cds { namespace intrusive {
 
         static void dispose_node( value_type * pVal )
         {
-            assert( pVal != null_ptr<value_type *>() );
+            assert( pVal != nullptr );
             typename node_builder::node_disposer()( node_traits::to_node_ptr(pVal) );
             disposer()( pVal );
         }
@@ -535,7 +535,7 @@ namespace cds { namespace intrusive {
                         goto retry;
                     }
 
-                    if ( pCur.ptr() == null_ptr<node_type *>()) {
+                    if ( pCur.ptr() == nullptr ) {
                         // end of the list at level nLevel - goto next level
                         break;
                     }
@@ -550,7 +550,7 @@ namespace cds { namespace intrusive {
                         // pCur is marked, i.e. logically deleted.
                         marked_node_ptr p( pCur.ptr() );
                         if ( pPred->next( nLevel ).compare_exchange_strong( p, marked_node_ptr( pSucc.ptr() ),
-                            memory_model::memory_order_release, CDS_ATOMIC::memory_order_relaxed ))
+                            memory_model::memory_order_release, atomics::memory_order_relaxed ))
                         {
                             if ( nLevel == 0 ) {
                                 gc::retire( node_traits::to_value_ptr( pCur.ptr() ), dispose_node );
@@ -618,7 +618,7 @@ namespace cds { namespace intrusive {
                         // pCur is marked, i.e. logically deleted.
                         marked_node_ptr p( pCur.ptr() );
                         if ( pPred->next( nLevel ).compare_exchange_strong( p, marked_node_ptr( pSucc.ptr() ),
-                            memory_model::memory_order_release, CDS_ATOMIC::memory_order_relaxed ))
+                            memory_model::memory_order_release, atomics::memory_order_relaxed ))
                         {
                             if ( nLevel == 0 )
                                 gc::retire( node_traits::to_value_ptr( pCur.ptr() ), dispose_node );
@@ -632,7 +632,7 @@ namespace cds { namespace intrusive {
                 pos.pSucc[ nLevel ] = pCur.ptr();
             }
 
-            return (pos.pCur = pCur.ptr()) != null_ptr<node_type *>();
+            return (pos.pCur = pCur.ptr()) != nullptr;
         }
 
         bool find_max_position( position& pos )
@@ -657,7 +657,7 @@ namespace cds { namespace intrusive {
                         goto retry;
                     }
 
-                    if ( pCur.ptr() == null_ptr<node_type *>()) {
+                    if ( pCur.ptr() == nullptr ) {
                         // end of the list at level nLevel - goto next level
                         break;
                     }
@@ -672,7 +672,7 @@ namespace cds { namespace intrusive {
                         // pCur is marked, i.e. logically deleted.
                         marked_node_ptr p( pCur.ptr() );
                         if ( pPred->next( nLevel ).compare_exchange_strong( p, marked_node_ptr( pSucc.ptr() ),
-                            memory_model::memory_order_release, CDS_ATOMIC::memory_order_relaxed ))
+                            memory_model::memory_order_release, atomics::memory_order_relaxed ))
                         {
                             if ( nLevel == 0 )
                                 gc::retire( node_traits::to_value_ptr( pCur.ptr() ), dispose_node );
@@ -694,7 +694,7 @@ namespace cds { namespace intrusive {
                 pos.pSucc[ nLevel ] = pCur.ptr();
             }
 
-            return (pos.pCur = pCur.ptr()) != null_ptr<node_type *>();
+            return (pos.pCur = pCur.ptr()) != nullptr;
         }
 
         template <typename Func>
@@ -708,7 +708,7 @@ namespace cds { namespace intrusive {
             {
                 marked_node_ptr p( pos.pSucc[0] );
                 pNode->next( 0 ).store( p, memory_model::memory_order_release );
-                if ( !pos.pPrev[0]->next(0).compare_exchange_strong( p, marked_node_ptr(pNode), memory_model::memory_order_release, CDS_ATOMIC::memory_order_relaxed ) ) {
+                if ( !pos.pPrev[0]->next(0).compare_exchange_strong( p, marked_node_ptr(pNode), memory_model::memory_order_release, atomics::memory_order_relaxed ) ) {
                     return false;
                 }
                 cds::unref( f )( val );
@@ -718,7 +718,7 @@ namespace cds { namespace intrusive {
                 marked_node_ptr p;
                 while ( true ) {
                     marked_node_ptr q( pos.pSucc[ nLevel ]);
-                    if ( !pNode->next( nLevel ).compare_exchange_strong( p, q, memory_model::memory_order_release, CDS_ATOMIC::memory_order_relaxed )) {
+                    if ( !pNode->next( nLevel ).compare_exchange_strong( p, q, memory_model::memory_order_release, atomics::memory_order_relaxed )) {
                         // pNode has been marked as removed while we are inserting it
                         // Stop inserting
                         assert( p.bits() );
@@ -726,7 +726,7 @@ namespace cds { namespace intrusive {
                         return true;
                     }
                     p = q;
-                    if ( pos.pPrev[nLevel]->next(nLevel).compare_exchange_strong( q, marked_node_ptr( pNode ), memory_model::memory_order_release, CDS_ATOMIC::memory_order_relaxed ) )
+                    if ( pos.pPrev[nLevel]->next(nLevel).compare_exchange_strong( q, marked_node_ptr( pNode ), memory_model::memory_order_release, atomics::memory_order_relaxed ) )
                         break;
 
                     // Renew insert position
@@ -744,7 +744,7 @@ namespace cds { namespace intrusive {
         template <typename Func>
         bool try_remove_at( node_type * pDel, position& pos, Func f )
         {
-            assert( pDel != null_ptr<node_type *>());
+            assert( pDel != nullptr );
 
             marked_node_ptr pSucc;
             typename gc::Guard gSucc;
@@ -754,7 +754,7 @@ namespace cds { namespace intrusive {
                 while ( true ) {
                     pSucc = gSucc.protect( pDel->next(nLevel), gc_protect );
                     if ( pSucc.bits() || pDel->next(nLevel).compare_exchange_weak( pSucc, pSucc | 1,
-                         memory_model::memory_order_acquire, CDS_ATOMIC::memory_order_relaxed ))
+                         memory_model::memory_order_acquire, atomics::memory_order_relaxed ))
                     {
                         break;
                     }
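Each CAS above takes two memory orders: the first applies only when the exchange succeeds, while the relaxed order covers the failed case, where the operation degenerates to a load that refreshes the expected value. A minimal sketch of that pattern with plain std::atomic (the container itself routes the success order through its memory_model typedef):

    #include <atomic>
    #include <cstdint>

    // Sketch: set the deletion mark on one successor word, as the level loop above does.
    bool try_mark( std::atomic<std::uintptr_t>& next, std::uintptr_t expected )
    {
        // Success publishes the marked value; failure only reloads `expected`,
        // so relaxed ordering is sufficient on that path.
        return next.compare_exchange_strong( expected, expected | 1,
                                             std::memory_order_release,
                                             std::memory_order_relaxed );
    }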
@@ -765,7 +765,7 @@ namespace cds { namespace intrusive {
                 pSucc = gSucc.protect( pDel->next(0), gc_protect );
                 marked_node_ptr p( pSucc.ptr() );
                 if ( pDel->next(0).compare_exchange_strong( p, marked_node_ptr(p.ptr(), 1),
-                     memory_model::memory_order_acquire, CDS_ATOMIC::memory_order_relaxed ))
+                     memory_model::memory_order_acquire, atomics::memory_order_relaxed ))
                 {
                     cds::unref(f)( *node_traits::to_value_ptr( pDel ));
 
@@ -775,7 +775,7 @@ namespace cds { namespace intrusive {
                     for ( int nLevel = static_cast<int>( pDel->height() - 1 ); nLevel >= 0; --nLevel ) {
                         pSucc = gSucc.protect( pDel->next(nLevel), gc_protect );
                         if ( !pos.pPrev[nLevel]->next(nLevel).compare_exchange_strong( p, marked_node_ptr(pSucc.ptr()),
-                            memory_model::memory_order_release, CDS_ATOMIC::memory_order_relaxed) )
+                            memory_model::memory_order_release, atomics::memory_order_relaxed) )
                         {
                             // Make slow erase
                             find_position( *node_traits::to_value_ptr( pDel ), pos, key_comparator(), false );
@@ -1036,7 +1036,7 @@ namespace cds { namespace intrusive {
         {
             unsigned int nCur = m_nHeight.load( memory_model::memory_order_relaxed );
             if ( nCur < nHeight )
-                m_nHeight.compare_exchange_strong( nCur, nHeight, memory_model::memory_order_release, CDS_ATOMIC::memory_order_relaxed );
+                m_nHeight.compare_exchange_strong( nCur, nHeight, memory_model::memory_order_release, atomics::memory_order_relaxed );
         }
         //@endcond
 
@@ -1055,7 +1055,7 @@ namespace cds { namespace intrusive {
             gc::check_available_guards( c_nHazardPtrCount );
 
             // Barrier for head node
-            CDS_ATOMIC::atomic_thread_fence( memory_model::memory_order_release );
+            atomics::atomic_thread_fence( memory_model::memory_order_release );
         }
 
         /// Clears and destructs the skip-list
@@ -1151,7 +1151,7 @@ namespace cds { namespace intrusive {
             node_type * pNode = node_traits::to_node_ptr( val );
             scoped_node_ptr scp( pNode );
             unsigned int nHeight = pNode->height();
-            bool bTowerOk = nHeight > 1 && pNode->get_tower() != null_ptr<atomic_node_ptr *>();
+            bool bTowerOk = nHeight > 1 && pNode->get_tower() != nullptr;
             bool bTowerMade = false;
 
             position pos;
@@ -1223,7 +1223,7 @@ namespace cds { namespace intrusive {
             node_type * pNode = node_traits::to_node_ptr( val );
             scoped_node_ptr scp( pNode );
             unsigned int nHeight = pNode->height();
-            bool bTowerOk = nHeight > 1 && pNode->get_tower() != null_ptr<atomic_node_ptr *>();
+            bool bTowerOk = nHeight > 1 && pNode->get_tower() != nullptr;
             bool bTowerMade = false;
 
 #       ifndef CDS_CXX11_LAMBDA_SUPPORT
@@ -1707,7 +1707,7 @@ namespace cds { namespace intrusive {
         /// Checks if the set is empty
         bool empty() const
         {
-            return m_Head.head()->next(0).load( memory_model::memory_order_relaxed ) == null_ptr<node_type *>();
+            return m_Head.head()->next( 0 ).load( memory_model::memory_order_relaxed ) == nullptr;
         }
 
         /// Clears the set (non-atomic)