3 #ifndef __CDS_INTRUSIVE_MICHAEL_LIST_HRC_H
4 #define __CDS_INTRUSIVE_MICHAEL_LIST_HRC_H
6 #include <cds/intrusive/michael_list_impl.h>
7 #include <cds/gc/hrc.h>
9 namespace cds { namespace intrusive { namespace michael_list {
11 // Specialization for HRC GC
12 template <typename Tag>
// Specialization of the Michael-list node for the HRC garbage collector.
// Unlike the plain HP specialization, an HRC container node must derive from
// gc::HRC::container_node and override cleanUp()/terminate() so the GC can
// unlink it safely before reclamation.
13 struct node< gc::HRC, Tag>: public gc::HRC::container_node
15 typedef gc::HRC gc ; ///< Garbage collector
16 typedef Tag tag ; ///< tag
// The low bit of the marked pointer is the logical-deletion mark.
18 typedef cds::details::marked_ptr<node, 1> marked_ptr ; ///< marked pointer
19 typedef typename gc::atomic_marked_ptr< marked_ptr> atomic_marked_ptr ; ///< atomic marked pointer
20 atomic_marked_ptr m_pNext ; ///< pointer to the next node in the list
// GC callback: helps unlink this node's successor if that successor has
// already been marked deleted, so dead nodes do not keep each other alive.
// pGC - per-thread GC context used to allocate hazard guards.
27 virtual void cleanUp( cds::gc::hrc::ThreadGC * pGC )
// Two guards: slot 0 protects our successor, slot 1 protects its successor.
30 typename gc::GuardArray<2> aGuards( *pGC );
33 marked_ptr pNextMarked( aGuards.protect( 0, m_pNext ));
34 node * pNext = pNextMarked.ptr();
// If the successor is already deleted, try to splice it out by swinging
// m_pNext past it to the successor's own next pointer.
// NOTE(review): m_bDeleted is presumably inherited from container_node —
// it is not declared in this visible fragment; confirm against gc/hrc.h.
35 if ( pNext && pNext->m_bDeleted.load(atomics::memory_order_acquire) ) {
36 marked_ptr p = aGuards.protect( 1, pNext->m_pNext );
// CAS may fail if another thread changed m_pNext concurrently; cleanUp
// is best-effort, so the failure is deliberately ignored here.
37 m_pNext.compare_exchange_strong( pNextMarked, p, atomics::memory_order_acquire, atomics::memory_order_relaxed );
// GC callback invoked when the node is about to be retired: clears m_pNext.
// bConcurrent selects the clearing strategy — a CAS loop when other threads
// may still touch the node, a plain store otherwise.
46 virtual void terminate( cds::gc::hrc::ThreadGC * pGC, bool bConcurrent )
49 marked_ptr pNext = m_pNext.load(atomics::memory_order_acquire);
// Spin until the null link is installed despite concurrent updates.
50 do {} while ( !m_pNext.compare_exchange_weak( pNext, marked_ptr(), atomics::memory_order_release, atomics::memory_order_relaxed ) );
// Non-concurrent path: no contention is possible, a relaxed store suffices.
53 m_pNext.store( marked_ptr(), atomics::memory_order_relaxed );
// For the HRC GC the link checker is mandatory: even when the user asked for
// opt::never_check_link, this specialization still selects the real
// link_checker, because HRC reclamation relies on links being verified.
61 template <typename NODE>
62 struct link_checker_selector< gc::HRC, NODE, opt::never_check_link >
64 typedef link_checker<NODE> type;
// Under HRC, opt::debug_check_link also maps to the full link_checker —
// link verification is always on for this GC, regardless of the option.
67 template <typename NODE>
68 struct link_checker_selector< gc::HRC, NODE, opt::debug_check_link >
70 typedef link_checker<NODE> type;
74 }}} // namespace cds::intrusive::michael_list
76 #endif // #ifndef __CDS_INTRUSIVE_MICHAEL_LIST_HRC_H