3 #ifndef CDSLIB_INTRUSIVE_FELDMAN_HASHSET_RCU_H
4 #define CDSLIB_INTRUSIVE_FELDMAN_HASHSET_RCU_H
6 #include <functional> // std::ref
7 #include <iterator> // std::iterator_traits
10 #include <cds/intrusive/details/feldman_hashset_base.h>
11 #include <cds/details/allocator.h>
12 #include <cds/urcu/details/check_deadlock.h>
13 #include <cds/urcu/exempt_ptr.h>
14 #include <cds/intrusive/details/raw_ptr_disposer.h>
17 namespace cds { namespace intrusive {
18 /// Intrusive hash set based on multi-level array, \ref cds_urcu_desc "RCU" specialization
19 /** @ingroup cds_intrusive_map
20 @anchor cds_intrusive_FeldmanHashSet_rcu
23 - [2013] Steven Feldman, Pierre LaBorde, Damian Dechev "Concurrent Multi-level Arrays:
24 Wait-free Extensible Hash Maps"
26 See algorithm short description @ref cds_intrusive_FeldmanHashSet_hp "here"
28 @note Two important things you should keep in mind when you're using \p %FeldmanHashSet:
29 - all keys must be fixed-size. It means that you cannot use \p std::string as a key for \p %FeldmanHashSet.
30 Instead, for the strings you should use well-known hashing algorithms like <a href="https://en.wikipedia.org/wiki/Secure_Hash_Algorithm">SHA1, SHA2</a>,
31 <a href="https://en.wikipedia.org/wiki/MurmurHash">MurmurHash</a>, <a href="https://en.wikipedia.org/wiki/CityHash">CityHash</a>
32 or its successor <a href="https://code.google.com/p/farmhash/">FarmHash</a> and so on, which
33 converts variable-length strings to fixed-length bit-strings, and use that hash as a key in \p %FeldmanHashSet.
34 - \p %FeldmanHashSet uses a perfect hashing. It means that if two different keys, for example, of type \p std::string,
35 have identical hash then you cannot insert both that keys in the set. \p %FeldmanHashSet does not maintain the key,
36 it maintains its fixed-size hash value.
39 - \p RCU - one of \ref cds_urcu_gc "RCU type"
40 - \p T - a value type to be stored in the set
41 - \p Traits - type traits, the structure based on \p feldman_hashset::traits or result of \p feldman_hashset::make_traits metafunction.
42 \p Traits is the mandatory argument because it has one mandatory type - an @ref feldman_hashset::traits::hash_accessor "accessor"
43 to hash value of \p T. The set algorithm does not calculate that hash value.
45 @note Before including <tt><cds/intrusive/feldman_hashset_rcu.h></tt> you should include appropriate RCU header file,
46 see \ref cds_urcu_gc "RCU type" for list of existing RCU class and corresponding header files.
48 The set supports @ref cds_intrusive_FeldmanHashSet_rcu_iterators "bidirectional thread-safe iterators"
50 with some restrictions.
55 #ifdef CDS_DOXYGEN_INVOKED
56 class Traits = feldman_hashset::traits
61 class FeldmanHashSet< cds::urcu::gc< RCU >, T, Traits >: protected feldman_hashset::multilevel_array<T, Traits>
64 typedef feldman_hashset::multilevel_array<T, Traits> base_class;
68 typedef cds::urcu::gc< RCU > gc; ///< RCU garbage collector
69 typedef T value_type; ///< type of value stored in the set
70 typedef Traits traits; ///< Traits template parameter
72 typedef typename traits::hash_accessor hash_accessor; ///< Hash accessor functor
73 typedef typename base_class::hash_type hash_type; ///< Hash type deduced from \p hash_accessor return type
74 typedef typename traits::disposer disposer; ///< data node disposer
75 typedef typename base_class::hash_comparator hash_comparator; ///< hash compare functor based on \p traits::compare and \p traits::less options
77 typedef typename traits::item_counter item_counter; ///< Item counter type
78 typedef typename traits::node_allocator node_allocator; ///< Array node allocator
79 typedef typename traits::memory_model memory_model; ///< Memory model
80 typedef typename traits::back_off back_off; ///< Backoff strategy
81 typedef typename traits::stat stat; ///< Internal statistics type
82 typedef typename traits::rcu_check_deadlock rcu_check_deadlock; ///< Deadlock checking policy
83 typedef typename gc::scoped_lock rcu_lock; ///< RCU scoped lock
84 static CDS_CONSTEXPR const bool c_bExtractLockExternal = false; ///< Group of \p extract_xxx functions does not require external locking
86 using exempt_ptr = cds::urcu::exempt_ptr< gc, value_type, value_type, disposer, void >; ///< pointer to extracted node
90 typedef typename base_class::node_ptr node_ptr;
91 typedef typename base_class::atomic_node_ptr atomic_node_ptr;
92 typedef typename base_class::array_node array_node;
93 typedef typename base_class::traverse_data traverse_data;
95 using base_class::to_array;
96 using base_class::to_node;
97 using base_class::stats;
98 using base_class::head;
99 using base_class::metrics;
101 typedef cds::urcu::details::check_deadlock_policy< gc, rcu_check_deadlock> check_deadlock_policy;
106 item_counter m_ItemCounter; ///< Item counter
110 /// Creates empty set
112 @param head_bits: 2<sup>head_bits</sup> specifies the size of head array, minimum is 4.
113 @param array_bits: 2<sup>array_bits</sup> specifies the size of array node, minimum is 2.
115 Equation for \p head_bits and \p array_bits:
117 sizeof(hash_type) * 8 == head_bits + N * array_bits
119 where \p N is multi-level array depth.
121 FeldmanHashSet(size_t head_bits = 8, size_t array_bits = 4)
122 : base_class(head_bits, array_bits)
125 /// Destructs the set and frees all data
133 The function inserts \p val in the set if it does not contain
134 an item with that hash.
136 Returns \p true if \p val is placed into the set, \p false otherwise.
138 The function locks RCU internally.
140 bool insert( value_type& val )
142 return insert( val, [](value_type&) {} );
147 This function is intended for derived non-intrusive containers.
149 The function allows to split creating of new item into two part:
150 - create item with key only
151 - insert new item into the set
152 - if inserting is success, calls \p f functor to initialize \p val.
154 The functor signature is:
156 void func( value_type& val );
158 where \p val is the item inserted.
160 The user-defined functor is called only if the inserting is success.
162 The function locks RCU internally.
163 @warning See \ref cds_intrusive_item_creating "insert item troubleshooting".
165 template <typename Func>
166 bool insert( value_type& val, Func f )
168 hash_type const& hash = hash_accessor()( val );
169 traverse_data pos( hash, *this );
173 node_ptr slot = base_class::traverse( pos );
174 assert(slot.bits() == 0);
177 if ( pos.pArr->nodes[pos.nSlot].load(memory_model::memory_order_acquire) == slot) {
179 if ( cmp( hash, hash_accessor()(*slot.ptr())) == 0 ) {
180 // the item with that hash value already exists
181 stats().onInsertFailed();
185 // the slot must be expanded
186 base_class::expand_slot( pos, slot );
189 // the slot is empty, try to insert data node
191 if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong(pNull, node_ptr(&val), memory_model::memory_order_release, atomics::memory_order_relaxed))
193 // the new data node has been inserted
196 stats().onInsertSuccess();
197 stats().height( pos.nHeight );
201 // insert failed - slot has been changed by another thread
203 stats().onInsertRetry();
207 stats().onSlotChanged();
213 Performs inserting or updating the item with hash value equal to \p val.
214 - If hash value is found then existing item is replaced with \p val, old item is disposed
215 with \p Traits::disposer. Note that the disposer is called by \p GC asynchronously.
216 The function returns <tt> std::pair<true, false> </tt>
217 - If hash value is not found and \p bInsert is \p true then \p val is inserted,
218 the function returns <tt> std::pair<true, true> </tt>
219 - If hash value is not found and \p bInsert is \p false then the set is unchanged,
220 the function returns <tt> std::pair<false, false> </tt>
222 Returns <tt> std::pair<bool, bool> </tt> where \p first is \p true if operation is successfull
223 (i.e. the item has been inserted or updated),
224 \p second is \p true if new item has been added or \p false if the set contains that hash.
226 The function locks RCU internally.
228 std::pair<bool, bool> update( value_type& val, bool bInsert = true )
230 return do_update(val, [](value_type&, value_type *) {}, bInsert );
233 /// Unlinks the item \p val from the set
235 The function searches the item \p val in the set and unlink it
236 if it is found and its address is equal to <tt>&val</tt>.
238 The function returns \p true if success and \p false otherwise.
240 RCU should not be locked. The function locks RCU internally.
242 bool unlink( value_type const& val )
244 check_deadlock_policy::check();
246 auto pred = [&val](value_type const& item) -> bool { return &item == &val; };
250 p = do_erase( hash_accessor()( val ), std::ref( pred ));
253 gc::template retire_ptr<disposer>( p );
259 /// Deletes the item from the set
261 The function searches \p hash in the set,
262 unlinks the item found, and returns \p true.
263 If that item is not found the function returns \p false.
265 The \ref disposer specified in \p Traits is called by garbage collector \p GC asynchronously.
267 RCU should not be locked. The function locks RCU internally.
269 bool erase( hash_type const& hash )
271 return erase(hash, [](value_type const&) {} );
274 /// Deletes the item from the set
276 The function searches \p hash in the set,
277 call \p f functor with item found, and unlinks it from the set.
278 The \ref disposer specified in \p Traits is called
279 by garbage collector \p GC asynchronously.
281 The \p Func interface is
284 void operator()( value_type& item );
288 If \p hash is not found the function returns \p false.
290 RCU should not be locked. The function locks RCU internally.
292 template <typename Func>
293 bool erase( hash_type const& hash, Func f )
295 check_deadlock_policy::check();
301 p = do_erase( hash, []( value_type const&) -> bool { return true; } );
304 // p is guarded by HP
307 gc::template retire_ptr<disposer>(p);
313 /// Extracts the item with specified \p hash
315 The function searches \p hash in the set,
316 unlinks it from the set, and returns \ref cds::urcu::exempt_ptr "exempt_ptr" pointer to the item found.
317 If the item with hash value equal to \p hash is not found the function returns an empty \p exempt_ptr.
319 RCU \p synchronize method can be called. RCU should NOT be locked.
320 The function does not call the disposer for the item found.
321 The disposer will be implicitly invoked when the returned object is destroyed or when
322 its \p release() member function is called.
325 typedef cds::intrusive::FeldmanHashSet< cds::urcu::gc< cds::urcu::general_buffered<> >, foo, my_traits > set_type;
329 typename set_type::exempt_ptr ep( theSet.extract( 5 ));
334 // Dispose returned item.
339 exempt_ptr extract( hash_type const& hash )
341 check_deadlock_policy::check();
346 p = do_erase( hash, []( value_type const&) -> bool {return true;} );
348 return exempt_ptr( p );
351 /// Finds an item by it's \p hash
353 The function searches the item by \p hash and calls the functor \p f for item found.
354 The interface of \p Func functor is:
357 void operator()( value_type& item );
360 where \p item is the item found.
362 The functor may change non-key fields of \p item. Note that the functor is only guarantee
363 that \p item cannot be disposed during the functor is executing.
364 The functor does not serialize simultaneous access to the set's \p item. If such access is
365 possible you must provide your own synchronization schema on item level to prevent unsafe item modifications.
367 The function returns \p true if \p hash is found, \p false otherwise.
369 The function applies RCU lock internally.
371 template <typename Func>
372 bool find( hash_type const& hash, Func f )
376 value_type * p = search( hash );
384 /// Checks whether the set contains \p hash
386 The function searches the item by its \p hash
387 and returns \p true if it is found, or \p false otherwise.
389 The function applies RCU lock internally.
391 bool contains( hash_type const& hash )
393 return find( hash, [](value_type&) {} );
396 /// Finds an item by it's \p hash and returns the item found
398 The function searches the item by its \p hash
399 and returns the pointer to the item found.
400 If \p hash is not found the function returns \p nullptr.
402 RCU should be locked before the function invocation.
403 Returned pointer is valid only while RCU is locked.
407 typedef cds::intrusive::FeldmanHashSet< your_template_params > my_set;
414 foo * p = theSet.get( 5 );
422 value_type * get( hash_type const& hash )
424 assert( gc::is_locked());
425 return search( hash );
428 /// Clears the set (non-atomic)
430 The function unlink all data node from the set.
431 The function is not atomic but is thread-safe.
432 After \p %clear() the set may not be empty because another threads may insert items.
434 For each item the \p disposer is called after unlinking.
438 clear_array( head(), head_size());
441 /// Checks if the set is empty
443 Emptiness is checked by item counting: if item count is zero then the set is empty.
444 Thus, the correct item counting feature is an important part of the set implementation.
451 /// Returns item count in the set
454 return m_ItemCounter;
457 /// Returns const reference to internal statistics
458 stat const& statistics() const
463 /// Returns the size of head node
464 using base_class::head_size;
466 /// Returns the size of the array node
467 using base_class::array_node_size;
469 /// Collects tree level statistics into \p stat
470 /** @copydetails cds::intrusive::FeldmanHashSet::get_level_statistics
472 void get_level_statistics(std::vector<feldman_hashset::level_statistics>& stat) const
474 base_class::get_level_statistics(stat);
481 friend class FeldmanHashSet;
484 array_node * m_pNode; ///< current array node
485 size_t m_idx; ///< current position in m_pNode
486 value_type * m_pValue; ///< current value
487 FeldmanHashSet const* m_set; ///< Hash set
490 iterator_base() CDS_NOEXCEPT
497 iterator_base(iterator_base const& rhs) CDS_NOEXCEPT
498 : m_pNode(rhs.m_pNode)
500 , m_pValue(rhs.m_pValue)
504 iterator_base& operator=(iterator_base const& rhs) CDS_NOEXCEPT
506 m_pNode = rhs.m_pNode;
508 m_pValue = rhs.m_pValue;
513 iterator_base& operator++()
519 iterator_base& operator--()
525 bool operator ==(iterator_base const& rhs) const CDS_NOEXCEPT
527 return m_pNode == rhs.m_pNode && m_idx == rhs.m_idx && m_set == rhs.m_set;
530 bool operator !=(iterator_base const& rhs) const CDS_NOEXCEPT
532 return !(*this == rhs);
536 iterator_base(FeldmanHashSet const& set, array_node * pNode, size_t idx, bool)
543 iterator_base(FeldmanHashSet const& set, array_node * pNode, size_t idx)
552 value_type * pointer() const CDS_NOEXCEPT
554 assert(gc::is_locked());
560 assert( gc::is_locked());
561 assert(m_set != nullptr);
562 assert(m_pNode != nullptr);
564 size_t const arrayNodeSize = m_set->array_node_size();
565 size_t const headSize = m_set->head_size();
566 array_node * pNode = m_pNode;
567 size_t idx = m_idx + 1;
568 size_t nodeSize = m_pNode->pParent ? arrayNodeSize : headSize;
571 if (idx < nodeSize) {
572 node_ptr slot = pNode->nodes[idx].load(memory_model::memory_order_acquire);
573 if (slot.bits() == base_class::flag_array_node ) {
574 // array node, go down the tree
575 assert(slot.ptr() != nullptr);
576 pNode = to_array(slot.ptr());
578 nodeSize = arrayNodeSize;
580 else if (slot.bits() == base_class::flag_array_converting ) {
581 // the slot is converting to array node right now - skip the node
589 m_pValue = slot.ptr();
597 if (pNode->pParent) {
598 idx = pNode->idxParent + 1;
599 pNode = pNode->pParent;
600 nodeSize = pNode->pParent ? arrayNodeSize : headSize;
604 assert(pNode == m_set->head());
605 assert(idx == headSize);
617 assert(gc::is_locked());
618 assert(m_set != nullptr);
619 assert(m_pNode != nullptr);
621 size_t const arrayNodeSize = m_set->array_node_size();
622 size_t const headSize = m_set->head_size();
623 size_t const endIdx = size_t(0) - 1;
625 array_node * pNode = m_pNode;
626 size_t idx = m_idx - 1;
627 size_t nodeSize = m_pNode->pParent ? arrayNodeSize : headSize;
631 node_ptr slot = pNode->nodes[idx].load(memory_model::memory_order_acquire);
632 if (slot.bits() == base_class::flag_array_node ) {
633 // array node, go down the tree
634 assert(slot.ptr() != nullptr);
635 pNode = to_array(slot.ptr());
636 nodeSize = arrayNodeSize;
639 else if (slot.bits() == base_class::flag_array_converting ) {
640 // the slot is converting to array node right now - skip the node
648 m_pValue = slot.ptr();
656 if (pNode->pParent) {
657 idx = pNode->idxParent - 1;
658 pNode = pNode->pParent;
659 nodeSize = pNode->pParent ? arrayNodeSize : headSize;
663 assert(pNode == m_set->head());
664 assert(idx == endIdx);
675 template <class Iterator>
676 Iterator init_begin() const
678 return Iterator(*this, head(), size_t(0) - 1);
681 template <class Iterator>
682 Iterator init_end() const
684 return Iterator(*this, head(), head_size(), false);
687 template <class Iterator>
688 Iterator init_rbegin() const
690 return Iterator(*this, head(), head_size());
693 template <class Iterator>
694 Iterator init_rend() const
696 return Iterator(*this, head(), size_t(0) - 1, false);
699 /// Bidirectional iterator class
700 template <bool IsConst>
701 class bidirectional_iterator : protected iterator_base
703 friend class FeldmanHashSet;
706 static CDS_CONSTEXPR bool const c_bConstantIterator = IsConst;
709 typedef typename std::conditional< IsConst, value_type const*, value_type*>::type value_ptr; ///< Value pointer
710 typedef typename std::conditional< IsConst, value_type const&, value_type&>::type value_ref; ///< Value reference
713 bidirectional_iterator() CDS_NOEXCEPT
716 bidirectional_iterator(bidirectional_iterator const& rhs) CDS_NOEXCEPT
720 bidirectional_iterator& operator=(bidirectional_iterator const& rhs) CDS_NOEXCEPT
722 iterator_base::operator=(rhs);
726 bidirectional_iterator& operator++()
728 iterator_base::operator++();
732 bidirectional_iterator& operator--()
734 iterator_base::operator--();
738 value_ptr operator ->() const CDS_NOEXCEPT
740 return iterator_base::pointer();
743 value_ref operator *() const CDS_NOEXCEPT
745 value_ptr p = iterator_base::pointer();
750 template <bool IsConst2>
751 bool operator ==(bidirectional_iterator<IsConst2> const& rhs) const CDS_NOEXCEPT
753 return iterator_base::operator==(rhs);
756 template <bool IsConst2>
757 bool operator !=(bidirectional_iterator<IsConst2> const& rhs) const CDS_NOEXCEPT
759 return !(*this == rhs);
763 bidirectional_iterator(FeldmanHashSet& set, array_node * pNode, size_t idx, bool)
764 : iterator_base(set, pNode, idx, false)
767 bidirectional_iterator(FeldmanHashSet& set, array_node * pNode, size_t idx)
768 : iterator_base(set, pNode, idx)
772 /// Reverse bidirectional iterator
773 template <bool IsConst>
774 class reverse_bidirectional_iterator : public iterator_base
776 friend class FeldmanHashSet;
779 typedef typename std::conditional< IsConst, value_type const*, value_type*>::type value_ptr; ///< Value pointer
780 typedef typename std::conditional< IsConst, value_type const&, value_type&>::type value_ref; ///< Value reference
783 reverse_bidirectional_iterator() CDS_NOEXCEPT
787 reverse_bidirectional_iterator(reverse_bidirectional_iterator const& rhs) CDS_NOEXCEPT
791 reverse_bidirectional_iterator& operator=(reverse_bidirectional_iterator const& rhs) CDS_NOEXCEPT
793 iterator_base::operator=(rhs);
797 reverse_bidirectional_iterator& operator++()
799 iterator_base::operator--();
803 reverse_bidirectional_iterator& operator--()
805 iterator_base::operator++();
809 value_ptr operator ->() const CDS_NOEXCEPT
811 return iterator_base::pointer();
814 value_ref operator *() const CDS_NOEXCEPT
816 value_ptr p = iterator_base::pointer();
821 template <bool IsConst2>
822 bool operator ==(reverse_bidirectional_iterator<IsConst2> const& rhs) const
824 return iterator_base::operator==(rhs);
827 template <bool IsConst2>
828 bool operator !=(reverse_bidirectional_iterator<IsConst2> const& rhs)
830 return !(*this == rhs);
834 reverse_bidirectional_iterator(FeldmanHashSet& set, array_node * pNode, size_t idx, bool)
835 : iterator_base(set, pNode, idx, false)
838 reverse_bidirectional_iterator(FeldmanHashSet& set, array_node * pNode, size_t idx)
839 : iterator_base(set, pNode, idx, false)
841 iterator_base::backward();
847 #ifdef CDS_DOXYGEN_INVOKED
848 typedef implementation_defined iterator; ///< @ref cds_intrusive_FeldmanHashSet_rcu_iterators "bidirectional iterator" type
849 typedef implementation_defined const_iterator; ///< @ref cds_intrusive_FeldmanHashSet_rcu_iterators "bidirectional const iterator" type
850 typedef implementation_defined reverse_iterator; ///< @ref cds_intrusive_FeldmanHashSet_rcu_iterators "bidirectional reverse iterator" type
851 typedef implementation_defined const_reverse_iterator; ///< @ref cds_intrusive_FeldmanHashSet_rcu_iterators "bidirectional reverse const iterator" type
853 typedef bidirectional_iterator<false> iterator;
854 typedef bidirectional_iterator<true> const_iterator;
855 typedef reverse_bidirectional_iterator<false> reverse_iterator;
856 typedef reverse_bidirectional_iterator<true> const_reverse_iterator;
859 ///@name Thread-safe iterators
860 /** @anchor cds_intrusive_FeldmanHashSet_rcu_iterators
861 The set supports thread-safe iterators: you may iterate over the set in multi-threaded environment
862 under explicit RCU lock.
863 RCU lock requirement means that inserting or searching is allowed but you must not erase the items from the set
864 since erasing under RCU lock can lead to a deadlock. However, another thread can call \p erase() safely
865 while your thread is iterating.
867 A typical example is:
872 uint32_t payload; // only for example
874 struct set_traits: cds::intrusive::feldman_hashset::traits
876 struct hash_accessor {
877 uint32_t operator()( foo const& src ) const
884 typedef cds::urcu::gc< cds::urcu::general_buffered<>> rcu;
885 typedef cds::intrusive::FeldmanHashSet< rcu, foo, set_traits > set_type;
891 // iterate over the set
894 typename set_type::rcu_lock l; // scoped RCU lock
897 for ( auto i = s.begin(); i != s.end(); ++i ) {
898 // deal with i. Remember, erasing is prohibited here!
901 } // at this point RCU lock is released
904 Each iterator object supports the common interface:
905 - dereference operators:
907 value_type [const] * operator ->() noexcept
908 value_type [const] & operator *() noexcept
910 - pre-increment and pre-decrement. Post-operators is not supported
911 - equality operators <tt>==</tt> and <tt>!=</tt>.
912 Iterators are equal iff they point to the same cell of the same array node.
913 Note that for two iterators \p it1 and \p it2 the condition <tt> it1 == it2 </tt>
914 does not entail <tt> &(*it1) == &(*it2) </tt>: welcome to concurrent containers
916 @note It is possible the item can be iterated more that once, for example, if an iterator points to the item
917 in an array node that is being splitted.
921 /// Returns an iterator to the beginning of the set
924 return iterator(*this, head(), size_t(0) - 1);
927 /// Returns an const iterator to the beginning of the set
928 const_iterator begin() const
930 return const_iterator(*this, head(), size_t(0) - 1);
933 /// Returns an const iterator to the beginning of the set
934 const_iterator cbegin()
936 return const_iterator(*this, head(), size_t(0) - 1);
939 /// Returns an iterator to the element following the last element of the set. This element acts as a placeholder; attempting to access it results in undefined behavior.
942 return iterator(*this, head(), head_size(), false);
945 /// Returns a const iterator to the element following the last element of the set. This element acts as a placeholder; attempting to access it results in undefined behavior.
946 const_iterator end() const
948 return const_iterator(*this, head(), head_size(), false);
951 /// Returns a const iterator to the element following the last element of the set. This element acts as a placeholder; attempting to access it results in undefined behavior.
952 const_iterator cend()
954 return const_iterator(*this, head(), head_size(), false);
957 /// Returns a reverse iterator to the first element of the reversed set
958 reverse_iterator rbegin()
960 return reverse_iterator(*this, head(), head_size());
963 /// Returns a const reverse iterator to the first element of the reversed set
964 const_reverse_iterator rbegin() const
966 return const_reverse_iterator(*this, head(), head_size());
969 /// Returns a const reverse iterator to the first element of the reversed set
970 const_reverse_iterator crbegin()
972 return const_reverse_iterator(*this, head(), head_size());
975 /// Returns a reverse iterator to the element following the last element of the reversed set
977 It corresponds to the element preceding the first element of the non-reversed container.
978 This element acts as a placeholder, attempting to access it results in undefined behavior.
980 reverse_iterator rend()
982 return reverse_iterator(*this, head(), size_t(0) - 1, false);
985 /// Returns a const reverse iterator to the element following the last element of the reversed set
987 It corresponds to the element preceding the first element of the non-reversed container.
988 This element acts as a placeholder, attempting to access it results in undefined behavior.
990 const_reverse_iterator rend() const
992 return const_reverse_iterator(*this, head(), size_t(0) - 1, false);
995 /// Returns a const reverse iterator to the element following the last element of the reversed set
997 It corresponds to the element preceding the first element of the non-reversed container.
998 This element acts as a placeholder, attempting to access it results in undefined behavior.
1000 const_reverse_iterator crend()
1002 return const_reverse_iterator(*this, head(), size_t(0) - 1, false);
1008 template <typename Func>
1009 std::pair<bool, bool> do_update(value_type& val, Func f, bool bInsert = true)
1011 hash_type const& hash = hash_accessor()(val);
1012 traverse_data pos( hash, *this );
1013 hash_comparator cmp;
1017 node_ptr slot = base_class::traverse( pos );
1018 assert(slot.bits() == 0);
1024 if ( pos.pArr->nodes[pos.nSlot].load(memory_model::memory_order_acquire) == slot) {
1026 if ( cmp( hash, hash_accessor()(*slot.ptr())) == 0 ) {
1027 // the item with that hash value already exists
1028 // Replace it with val
1029 if ( slot.ptr() == &val ) {
1030 stats().onUpdateExisting();
1031 return std::make_pair(true, false);
1034 if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong(slot, node_ptr(&val), memory_model::memory_order_release, atomics::memory_order_relaxed)) {
1035 // slot can be disposed
1036 f( val, slot.ptr());
1038 stats().onUpdateExisting();
1039 goto update_existing_done;
1042 stats().onUpdateRetry();
1046 // the slot must be expanded
1047 base_class::expand_slot( pos, slot );
1050 stats().onUpdateFailed();
1051 return std::make_pair(false, false);
1056 // the slot is empty, try to insert data node
1059 if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong(pNull, node_ptr(&val), memory_model::memory_order_release, atomics::memory_order_relaxed))
1061 // the new data node has been inserted
1064 stats().onUpdateNew();
1065 stats().height( pos.nHeight );
1066 return std::make_pair(true, true);
1070 stats().onUpdateFailed();
1071 return std::make_pair(false, false);
1074 // insert failed - slot has been changed by another thread
1076 stats().onUpdateRetry();
1080 stats().onSlotChanged();
1085 // retire_ptr must be called only outside of RCU lock
1086 update_existing_done:
1088 gc::template retire_ptr<disposer>(pOld);
1089 return std::make_pair(true, false);
1092 template <typename Predicate>
1093 value_type * do_erase( hash_type const& hash, Predicate pred)
1095 assert(gc::is_locked());
1096 traverse_data pos( hash, *this );
1097 hash_comparator cmp;
1100 node_ptr slot = base_class::traverse( pos );
1101 assert( slot.bits() == 0 );
1103 if ( pos.pArr->nodes[pos.nSlot].load( memory_model::memory_order_acquire ) == slot ) {
1105 if ( cmp( hash, hash_accessor()(*slot.ptr())) == 0 && pred( *slot.ptr())) {
1106 // item found - replace it with nullptr
1107 if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong( slot, node_ptr( nullptr ), memory_model::memory_order_acquire, atomics::memory_order_relaxed )) {
1109 stats().onEraseSuccess();
1113 stats().onEraseRetry();
1116 stats().onEraseFailed();
1120 // the slot is empty
1121 stats().onEraseFailed();
1126 stats().onSlotChanged();
1130 value_type * search(hash_type const& hash )
1132 assert( gc::is_locked());
1133 traverse_data pos( hash, *this );
1134 hash_comparator cmp;
1137 node_ptr slot = base_class::traverse( pos );
1138 assert( slot.bits() == 0 );
1140 if ( pos.pArr->nodes[pos.nSlot].load( memory_model::memory_order_acquire ) != slot ) {
1141 // slot value has been changed - retry
1142 stats().onSlotChanged();
1145 else if ( slot.ptr() && cmp( hash, hash_accessor()(*slot.ptr())) == 0 ) {
1147 stats().onFindSuccess();
1150 stats().onFindFailed();
1159 void clear_array(array_node * pArrNode, size_t nSize)
1164 for (atomic_node_ptr * pArr = pArrNode->nodes, *pLast = pArr + nSize; pArr != pLast; ++pArr) {
1166 node_ptr slot = pArr->load(memory_model::memory_order_acquire);
1167 if (slot.bits() == base_class::flag_array_node ) {
1168 // array node, go down the tree
1169 assert(slot.ptr() != nullptr);
1170 clear_array(to_array(slot.ptr()), array_node_size());
1173 else if (slot.bits() == base_class::flag_array_converting ) {
1174 // the slot is converting to array node right now
1175 while ((slot = pArr->load(memory_model::memory_order_acquire)).bits() == base_class::flag_array_converting ) {
1177 stats().onSlotConverting();
1181 assert(slot.ptr() != nullptr);
1182 assert(slot.bits() == base_class::flag_array_node );
1183 clear_array(to_array(slot.ptr()), array_node_size());
1188 if (pArr->compare_exchange_strong(slot, node_ptr(), memory_model::memory_order_acquire, atomics::memory_order_relaxed)) {
1190 gc::template retire_ptr<disposer>(slot.ptr());
1192 stats().onEraseSuccess();
1203 }} // namespace cds::intrusive
1205 #endif // #ifndef CDSLIB_INTRUSIVE_FELDMAN_HASHSET_RCU_H