3 #ifndef __CDS_INTRUSIVE_LAZY_LIST_HRC_H
4 #define __CDS_INTRUSIVE_LAZY_LIST_HRC_H
6 #include <cds/intrusive/impl/lazy_list.h>
7 #include <cds/gc/hrc.h>
8 #include <cds/details/allocator.h>
10 namespace cds { namespace intrusive { namespace lazy_list {
12 // Specialization for HRC GC
// Specialization of the lazy-list node for the gc::HRC garbage collector.
// Derives from gc::HRC::container_node so the HRC GC can track the node;
// the virtual cleanUp()/terminate() hooks below implement the GC's
// node-retirement protocol.
13 template <typename Lock, typename Tag>
14 struct node< gc::HRC, Lock, Tag>: public gc::HRC::container_node
16 typedef gc::HRC gc ; ///< Garbage collector
17 typedef Lock lock_type ; ///< Lock type
18 typedef Tag tag ; ///< tag
20 typedef cds::details::marked_ptr<node, 1> marked_ptr ; ///< marked pointer
21 typedef typename gc::template atomic_marked_ptr< marked_ptr> atomic_marked_ptr ; ///< atomic marked pointer specific for GC
23 atomic_marked_ptr m_pNext ; ///< pointer to the next node in the list + logical deletion mark
24 mutable lock_type m_Lock ; ///< Node lock
26 /// Checks if node is marked
27 bool is_marked() const
// The logical-deletion mark lives in the low bit of m_pNext; a non-zero
// bits() value means this node has been logically deleted.
29 return m_pNext.load(atomics::memory_order_relaxed).bits() != 0;
// GC hook: invoked by the HRC garbage collector to help unlink an already
// logically-deleted successor before this node is reclaimed.
37 virtual void cleanUp( cds::gc::hrc::ThreadGC * pGC )
39 assert( pGC != nullptr );
// Guard slot 0 protects our successor, slot 1 the successor's successor,
// so neither can be reclaimed while we attempt to short-circuit the link.
40 typename gc::GuardArray<2> aGuards( *pGC );
43 marked_ptr pNextMarked( aGuards.protect( 0, m_pNext ));
44 node * pNext = pNextMarked.ptr();
// If the successor is marked deleted (m_bDeleted — presumably a member of
// the gc::HRC::container_node base set on retirement; confirm), try to
// splice it out by swinging m_pNext past it. A failed CAS is harmless:
// another thread has already helped or the link changed underneath us.
45 if ( pNext != nullptr && pNext->m_bDeleted.load( atomics::memory_order_acquire ) ) {
46 marked_ptr p = aGuards.protect( 1, pNext->m_pNext );
47 m_pNext.compare_exchange_weak( pNextMarked, p, atomics::memory_order_acquire, atomics::memory_order_relaxed );
// GC hook: invoked by HRC when the node is destroyed; clears m_pNext.
56 virtual void terminate( cds::gc::hrc::ThreadGC * pGC, bool bConcurrent )
// NOTE(review): the branch on bConcurrent lies in lines not visible in
// this chunk — confirm. Concurrent path: CAS-loop the link down to a null
// marked_ptr so a racing reader never re-installs a stale value.
59 marked_ptr pNext( m_pNext.load(atomics::memory_order_relaxed));
60 do {} while ( !m_pNext.compare_exchange_weak( pNext, marked_ptr(), atomics::memory_order_release, atomics::memory_order_relaxed ) );
// Non-concurrent path: no other thread can observe the node, so a plain
// relaxed store suffices.
63 m_pNext.store( marked_ptr(), atomics::memory_order_relaxed );
// Holder of the list's head/tail sentinel nodes, specialized for gc::HRC.
// HRC-managed nodes must not be freed directly while other threads may
// still hold guarded references — they are handed to the GC with a
// disposer for deferred reclamation.
70 template <typename NodeType, typename Alloc >
71 class boundary_nodes< gc::HRC, NodeType, Alloc >
73 typedef NodeType node_type;
74 typedef cds::details::Allocator< node_type, Alloc> cxx_allocator ; ///< allocator for the tail node
// Disposer functor passed to HRC::retire(): returns a boundary node to the
// allocator once the GC proves no thread can still reference it.
76 struct boundary_disposer
78 void operator()( node_type * p )
80 cxx_allocator().Delete( p );
// Allocate the head and tail sentinels (presumably inside the constructor —
// the enclosing lines are not visible in this chunk; confirm).
91 m_pHead = cxx_allocator().New();
92 m_pTail = cxx_allocator().New();
// Teardown: retire the sentinels to the GC instead of deleting them
// immediately; boundary_disposer frees them after the grace period.
97 cds::gc::HRC::template retire<boundary_disposer>( m_pHead );
98 cds::gc::HRC::template retire<boundary_disposer>( m_pTail );
/// Read-only accessor for the head sentinel node
106 node_type const * head() const
/// Read-only accessor for the tail sentinel node
114 node_type const * tail() const
// Node cleaner specialization for gc::HRC: resets the node's link field so
// the node can be safely reclaimed or reused.
123 template <typename Node, typename MemoryModel>
124 struct node_cleaner< gc::HRC, Node, MemoryModel> {
125 void operator()( Node * p )
127 typedef typename Node::marked_ptr marked_ptr;
// A single release store of a null marked_ptr clears both the successor
// pointer and the logical-deletion bit at once.
128 p->m_pNext.store( marked_ptr(), MemoryModel::memory_order_release );
129 //p->clean( MemoryModel::memory_order_release );
// For gc::HRC the link field must always be validated before linking a
// node: even when the user requests opt::never_check_link, this
// specialization selects the checking link_checker.
137 template <typename NODE>
138 struct link_checker_selector< gc::HRC, NODE, opt::never_check_link >
140 typedef link_checker<NODE> type;
// opt::debug_check_link likewise maps to the full link_checker under
// gc::HRC (checking is unconditional for this GC, not debug-only).
143 template <typename NODE>
144 struct link_checker_selector< gc::HRC, NODE, opt::debug_check_link >
146 typedef link_checker<NODE> type;
150 }}} // namespace cds::intrusive::lazy_list
152 #endif // #ifndef __CDS_INTRUSIVE_LAZY_LIST_HRC_H