3 #ifndef CDSLIB_INTRUSIVE_FELDMAN_HASHSET_RCU_H
4 #define CDSLIB_INTRUSIVE_FELDMAN_HASHSET_RCU_H
6 #include <functional> // std::ref
7 #include <iterator> // std::iterator_traits
10 #include <cds/intrusive/details/feldman_hashset_base.h>
11 #include <cds/details/allocator.h>
12 #include <cds/urcu/details/check_deadlock.h>
13 #include <cds/urcu/exempt_ptr.h>
14 #include <cds/intrusive/details/raw_ptr_disposer.h>
17 namespace cds { namespace intrusive {
18 /// Intrusive hash set based on multi-level array, \ref cds_urcu_desc "RCU" specialization
19 /** @ingroup cds_intrusive_map
20 @anchor cds_intrusive_FeldmanHashSet_rcu
23 - [2013] Steven Feldman, Pierre LaBorde, Damian Dechev "Concurrent Multi-level Arrays:
24 Wait-free Extensible Hash Maps"
26 See algorithm short description @ref cds_intrusive_FeldmanHashSet_hp "here"
28 @note Two important things you should keep in mind when you're using \p %FeldmanHashSet:
29 - all keys must be fixed-size. It means that you cannot use \p std::string as a key for \p %FeldmanHashSet.
30 Instead, for the strings you should use well-known hashing algorithms like <a href="https://en.wikipedia.org/wiki/Secure_Hash_Algorithm">SHA1, SHA2</a>,
31 <a href="https://en.wikipedia.org/wiki/MurmurHash">MurmurHash</a>, <a href="https://en.wikipedia.org/wiki/CityHash">CityHash</a>
32 or its successor <a href="https://code.google.com/p/farmhash/">FarmHash</a> and so on, which
33 converts variable-length strings to fixed-length bit-strings, and use that hash as a key in \p %FeldmanHashSet.
34 - \p %FeldmanHashSet uses a perfect hashing. It means that if two different keys, for example, of type \p std::string,
35 have identical hash then you cannot insert both of those keys in the set. \p %FeldmanHashSet does not maintain the key,
36 it maintains its fixed-size hash value.
39 - \p RCU - one of \ref cds_urcu_gc "RCU type"
40 - \p T - a value type to be stored in the set
41 - \p Traits - type traits, the structure based on \p feldman_hashset::traits or result of \p feldman_hashset::make_traits metafunction.
42 \p Traits is the mandatory argument because it has one mandatory type - an @ref feldman_hashset::traits::hash_accessor "accessor"
43 to hash value of \p T. The set algorithm does not calculate that hash value.
45 @note Before including <tt><cds/intrusive/feldman_hashset_rcu.h></tt> you should include appropriate RCU header file,
46 see \ref cds_urcu_gc "RCU type" for list of existing RCU class and corresponding header files.
48 The set supports @ref cds_intrusive_FeldmanHashSet_rcu_iterators "bidirectional thread-safe iterators"
49 with some restrictions.
54 #ifdef CDS_DOXYGEN_INVOKED
55 class Traits = feldman_hashset::traits
60 class FeldmanHashSet< cds::urcu::gc< RCU >, T, Traits >: protected feldman_hashset::multilevel_array<T, Traits>
63 typedef feldman_hashset::multilevel_array<T, Traits> base_class;
67 typedef cds::urcu::gc< RCU > gc; ///< RCU garbage collector
68 typedef T value_type; ///< type of value stored in the set
69 typedef Traits traits; ///< Traits template parameter
71 typedef typename traits::hash_accessor hash_accessor; ///< Hash accessor functor
72 typedef typename base_class::hash_type hash_type; ///< Hash type deduced from \p hash_accessor return type
73 typedef typename traits::disposer disposer; ///< data node disposer
74 typedef typename base_class::hash_comparator hash_comparator; ///< hash compare functor based on \p traits::compare and \p traits::less options
76 typedef typename traits::item_counter item_counter; ///< Item counter type
77 typedef typename traits::node_allocator node_allocator; ///< Array node allocator
78 typedef typename traits::memory_model memory_model; ///< Memory model
79 typedef typename traits::back_off back_off; ///< Backoff strategy
80 typedef typename traits::stat stat; ///< Internal statistics type
81 typedef typename traits::rcu_check_deadlock rcu_check_deadlock; ///< Deadlock checking policy
82 typedef typename gc::scoped_lock rcu_lock; ///< RCU scoped lock
83 static CDS_CONSTEXPR const bool c_bExtractLockExternal = false; ///< Group of \p extract_xxx functions does not require external locking
85 using exempt_ptr = cds::urcu::exempt_ptr< gc, value_type, value_type, disposer, void >; ///< pointer to extracted node
89 typedef typename base_class::node_ptr node_ptr;
90 typedef typename base_class::atomic_node_ptr atomic_node_ptr;
91 typedef typename base_class::array_node array_node;
92 typedef typename base_class::traverse_data traverse_data;
94 using base_class::to_array;
95 using base_class::to_node;
96 using base_class::stats;
97 using base_class::head;
98 using base_class::metrics;
100 typedef cds::urcu::details::check_deadlock_policy< gc, rcu_check_deadlock> check_deadlock_policy;
105 item_counter m_ItemCounter; ///< Item counter
109 /// Creates empty set
111 @param head_bits: 2<sup>head_bits</sup> specifies the size of head array, minimum is 4.
112 @param array_bits: 2<sup>array_bits</sup> specifies the size of array node, minimum is 2.
114 Equation for \p head_bits and \p array_bits:
116 sizeof(hash_type) * 8 == head_bits + N * array_bits
118 where \p N is multi-level array depth.
// Constructor: all sizing is delegated to the multilevel_array base class.
// Defaults give a head array of 2^8 = 256 slots and internal array nodes of 2^4 = 16 slots.
120 FeldmanHashSet(size_t head_bits = 8, size_t array_bits = 4)
121 : base_class(head_bits, array_bits)
124 /// Destructs the set and frees all data
132 The function inserts \p val in the set if it does not contain
133 an item with that hash.
135 Returns \p true if \p val is placed into the set, \p false otherwise.
137 The function locks RCU internally.
139 bool insert( value_type& val )
// Delegates to the functor overload with a no-op functor.
141 return insert( val, [](value_type&) {} );
146 This function is intended for derived non-intrusive containers.
148 The function allows to split creating of new item into two part:
149 - create item with key only
150 - insert new item into the set
151 - if inserting is success, calls \p f functor to initialize \p val.
153 The functor signature is:
155 void func( value_type& val );
157 where \p val is the item inserted.
159 The user-defined functor is called only if the inserting is success.
161 The function locks RCU internally.
162 @warning See \ref cds_intrusive_item_creating "insert item troubleshooting".
164 template <typename Func>
165 bool insert( value_type& val, Func f )
// The set never computes a hash itself: the hash is read from the value via hash_accessor.
167 hash_type const& hash = hash_accessor()( val );
168 traverse_data pos( hash, *this );
// Traverse the multi-level array down to the slot addressed by the hash bits.
172 node_ptr slot = base_class::traverse( pos );
173 assert(slot.bits() == 0);
// Re-check with an acquire load that the slot still holds the value traverse() saw.
176 if ( pos.pArr->nodes[pos.nSlot].load(memory_model::memory_order_acquire) == slot) {
178 if ( cmp( hash, hash_accessor()(*slot.ptr())) == 0 ) {
179 // the item with that hash value already exists
180 stats().onInsertFailed();
// Occupied by a different hash: the data slot is split into a new array node.
184 // the slot must be expanded
185 base_class::expand_slot( pos, slot );
188 // the slot is empty, try to insert data node
// Release CAS publishes the node; relaxed on failure since we only retry.
190 if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong(pNull, node_ptr(&val), memory_model::memory_order_release, atomics::memory_order_relaxed))
192 // the new data node has been inserted
195 stats().onInsertSuccess();
196 stats().height( pos.nHeight );
200 // insert failed - slot has been changed by another thread
202 stats().onInsertRetry();
// Slot changed between traverse() and the re-check: retry the whole traversal.
206 stats().onSlotChanged();
212 Performs inserting or updating the item with hash value equal to \p val.
213 - If hash value is found then existing item is replaced with \p val, old item is disposed
214 with \p Traits::disposer. Note that the disposer is called by \p GC asynchronously.
215 The function returns <tt> std::pair<true, false> </tt>
216 - If hash value is not found and \p bInsert is \p true then \p val is inserted,
217 the function returns <tt> std::pair<true, true> </tt>
218 - If hash value is not found and \p bInsert is \p false then the set is unchanged,
219 the function returns <tt> std::pair<false, false> </tt>
221 Returns <tt> std::pair<bool, bool> </tt> where \p first is \p true if operation is successfull
222 (i.e. the item has been inserted or updated),
223 \p second is \p true if new item has been added or \p false if the set contains that hash.
225 The function locks RCU internally.
227 std::pair<bool, bool> update( value_type& val, bool bInsert = true )
// Delegates to do_update() with a no-op functor; see do_update() for the CAS loop.
229 return do_update(val, [](value_type&, value_type *) {}, bInsert );
232 /// Unlinks the item \p val from the set
234 The function searches the item \p val in the set and unlink it
235 if it is found and its address is equal to <tt>&val</tt>.
237 The function returns \p true if success and \p false otherwise.
239 RCU should not be locked. The function locks RCU internally.
241 bool unlink( value_type const& val )
// Must not be called under RCU lock (deadlock check), see class docs.
243 check_deadlock_policy::check();
// The predicate matches by address, so only this exact node is unlinked,
// not just any item with an equal hash.
245 auto pred = [&val](value_type const& item) -> bool { return &item == &val; };
249 p = do_erase( hash_accessor()( val ), std::ref( pred ));
// Retirement is deferred to the RCU disposer; the node is freed asynchronously.
252 gc::template retire_ptr<disposer>( p );
258 /// Deletes the item from the set
260 The function searches \p hash in the set,
261 unlinks the item found, and returns \p true.
262 If that item is not found the function returns \p false.
264 The \ref disposer specified in \p Traits is called by garbage collector \p GC asynchronously.
266 RCU should not be locked. The function locks RCU internally.
268 bool erase( hash_type const& hash )
// Delegates to the functor overload with a no-op functor.
270 return erase(hash, [](value_type const&) {} );
273 /// Deletes the item from the set
275 The function searches \p hash in the set,
276 call \p f functor with item found, and unlinks it from the set.
277 The \ref disposer specified in \p Traits is called
278 by garbage collector \p GC asynchronously.
280 The \p Func interface is
283 void operator()( value_type& item );
287 If \p hash is not found the function returns \p false.
289 RCU should not be locked. The function locks RCU internally.
291 template <typename Func>
292 bool erase( hash_type const& hash, Func f )
// Must not be called under RCU lock (deadlock check).
294 check_deadlock_policy::check();
// Unconditional predicate: any item with matching hash is unlinked.
300 p = do_erase( hash, []( value_type const&) -> bool { return true; } );
303 // p is protected by the RCU read-side critical section (this is the RCU specialization, not HP)
306 gc::template retire_ptr<disposer>(p);
312 /// Extracts the item with specified \p hash
314 The function searches \p hash in the set,
315 unlinks it from the set, and returns \ref cds::urcu::exempt_ptr "exempt_ptr" pointer to the item found.
316 If the item with hash equal to \p hash is not found the function returns an empty \p exempt_ptr.
318 RCU \p synchronize method can be called. RCU should NOT be locked.
319 The function does not call the disposer for the item found.
320 The disposer will be implicitly invoked when the returned object is destroyed or when
321 its \p release() member function is called.
324 typedef cds::intrusive::FeldmanHashSet< cds::urcu::gc< cds::urcu::general_buffered<> >, foo, my_traits > set_type;
328 typename set_type::exempt_ptr ep( theSet.extract( 5 ));
333 // Dispose returned item.
338 exempt_ptr extract( hash_type const& hash )
340 check_deadlock_policy::check();
345 p = do_erase( hash, []( value_type const&) -> bool {return true;} );
// Ownership is handed to exempt_ptr; its destructor/release() will invoke the disposer.
// An empty exempt_ptr is produced when p is nullptr (hash not found).
347 return exempt_ptr( p );
350 /// Finds an item by it's \p hash
352 The function searches the item by \p hash and calls the functor \p f for item found.
353 The interface of \p Func functor is:
356 void operator()( value_type& item );
359 where \p item is the item found.
361 The functor may change non-key fields of \p item. Note that the functor is only guarantee
362 that \p item cannot be disposed during the functor is executing.
363 The functor does not serialize simultaneous access to the set's \p item. If such access is
364 possible you must provide your own synchronization schema on item level to prevent unsafe item modifications.
366 The function returns \p true if \p hash is found, \p false otherwise.
368 The function applies RCU lock internally.
370 template <typename Func>
371 bool find( hash_type const& hash, Func f )
// search() requires the RCU lock to be held; the lock scope is taken inside this method.
375 value_type * p = search( hash );
383 /// Checks whether the set contains \p hash
385 The function searches the item by its \p hash
386 and returns \p true if it is found, or \p false otherwise.
388 The function applies RCU lock internally.
390 bool contains( hash_type const& hash )
// Presence check only: reuses find() with a no-op functor.
392 return find( hash, [](value_type&) {} );
395 /// Finds an item by it's \p hash and returns the item found
397 The function searches the item by its \p hash
398 and returns the pointer to the item found.
399 If \p hash is not found the function returns \p nullptr.
401 RCU should be locked before the function invocation.
402 Returned pointer is valid only while RCU is locked.
406 typedef cds::intrusive::FeldmanHashSet< your_template_params > my_set;
413 foo * p = theSet.get( 5 );
421 value_type * get( hash_type const& hash )
// Unlike find()/contains(), the caller must already hold the RCU lock;
// the returned pointer is valid only while the lock is held.
423 assert( gc::is_locked());
424 return search( hash );
427 /// Clears the set (non-atomic)
429 The function unlink all data node from the set.
430 The function is not atomic but is thread-safe.
431 After \p %clear() the set may not be empty because another threads may insert items.
433 For each item the \p disposer is called after unlinking.
// Recursively unlinks every data node starting from the head array;
// clear_array() calls the disposer (via RCU retire) for each unlinked item.
437 clear_array( head(), head_size());
440 /// Checks if the set is empty
442 Emptiness is checked by item counting: if item count is zero then the set is empty.
443 Thus, the correct item counting feature is an important part of the set implementation.
450 /// Returns item count in the set
453 return m_ItemCounter;
456 /// Returns const reference to internal statistics
457 stat const& statistics() const
462 /// Returns the size of head node
463 using base_class::head_size;
465 /// Returns the size of the array node
466 using base_class::array_node_size;
468 /// Collects tree level statistics into \p stat
469 /** @copydetails cds::intrusive::FeldmanHashSet::get_level_statistics
// Thin forwarder: per-level statistics are collected by the multilevel_array base.
471 void get_level_statistics(std::vector<feldman_hashset::level_statistics>& stat) const
473 base_class::get_level_statistics(stat);
480 friend class FeldmanHashSet;
// Iterator state: the triple (node, index, set) identifies a cell of the multi-level array.
483 array_node * m_pNode; ///< current array node
484 size_t m_idx; ///< current position in m_pNode
485 value_type * m_pValue; ///< current value
486 FeldmanHashSet const* m_set; ///< Hash set
489 iterator_base() CDS_NOEXCEPT
496 iterator_base(iterator_base const& rhs) CDS_NOEXCEPT
497 : m_pNode(rhs.m_pNode)
499 , m_pValue(rhs.m_pValue)
503 iterator_base& operator=(iterator_base const& rhs) CDS_NOEXCEPT
505 m_pNode = rhs.m_pNode;
507 m_pValue = rhs.m_pValue;
512 iterator_base& operator++()
518 iterator_base& operator--()
// Equality is (node, index, set) only; m_pValue is deliberately not compared,
// so two equal iterators may observe different values under concurrency (see class docs).
524 bool operator ==(iterator_base const& rhs) const CDS_NOEXCEPT
526 return m_pNode == rhs.m_pNode && m_idx == rhs.m_idx && m_set == rhs.m_set;
529 bool operator !=(iterator_base const& rhs) const CDS_NOEXCEPT
531 return !(*this == rhs);
// The bool-tagged constructor builds an end-style iterator without traversal.
535 iterator_base(FeldmanHashSet const& set, array_node * pNode, size_t idx, bool)
542 iterator_base(FeldmanHashSet const& set, array_node * pNode, size_t idx)
// Dereference requires an active RCU read-side critical section.
551 value_type * pointer() const CDS_NOEXCEPT
553 assert(gc::is_locked());
// Forward traversal: advance to the next occupied data slot, descending into
// child array nodes and climbing back to the parent when a node is exhausted.
559 assert( gc::is_locked());
560 assert(m_set != nullptr);
561 assert(m_pNode != nullptr);
563 size_t const arrayNodeSize = m_set->array_node_size();
564 size_t const headSize = m_set->head_size();
565 array_node * pNode = m_pNode;
566 size_t idx = m_idx + 1;
// Only the head array uses headSize; every deeper node uses arrayNodeSize.
567 size_t nodeSize = m_pNode->pParent ? arrayNodeSize : headSize;
570 if (idx < nodeSize) {
571 node_ptr slot = pNode->nodes[idx].load(memory_model::memory_order_acquire);
572 if (slot.bits() == base_class::flag_array_node ) {
573 // array node, go down the tree
574 assert(slot.ptr() != nullptr);
575 pNode = to_array(slot.ptr());
577 nodeSize = arrayNodeSize;
579 else if (slot.bits() == base_class::flag_array_converting ) {
580 // the slot is converting to array node right now - skip the node
// Plain data slot: remember the value pointer (may be nullptr for an empty slot).
588 m_pValue = slot.ptr();
// Current node exhausted: resume in the parent just past our own slot.
596 if (pNode->pParent) {
597 idx = pNode->idxParent + 1;
598 pNode = pNode->pParent;
599 nodeSize = pNode->pParent ? arrayNodeSize : headSize;
// No parent means we ran off the head array, i.e. reached end().
603 assert(pNode == m_set->head());
604 assert(idx == headSize);
// Backward traversal: mirror of the forward walk, stepping to the previous
// occupied data slot; index underflow (size_t(0) - 1) marks the rend() position.
616 assert(gc::is_locked());
617 assert(m_set != nullptr);
618 assert(m_pNode != nullptr);
620 size_t const arrayNodeSize = m_set->array_node_size();
621 size_t const headSize = m_set->head_size();
// Sentinel index: size_t wraparound of -1, compared against after leaving the head array.
622 size_t const endIdx = size_t(0) - 1;
624 array_node * pNode = m_pNode;
625 size_t idx = m_idx - 1;
626 size_t nodeSize = m_pNode->pParent ? arrayNodeSize : headSize;
630 node_ptr slot = pNode->nodes[idx].load(memory_model::memory_order_acquire);
631 if (slot.bits() == base_class::flag_array_node ) {
632 // array node, go down the tree
633 assert(slot.ptr() != nullptr);
634 pNode = to_array(slot.ptr());
635 nodeSize = arrayNodeSize;
638 else if (slot.bits() == base_class::flag_array_converting ) {
639 // the slot is converting to array node right now - skip the node
647 m_pValue = slot.ptr();
// Node exhausted going backwards: resume in the parent just before our own slot.
655 if (pNode->pParent) {
656 idx = pNode->idxParent - 1;
657 pNode = pNode->pParent;
658 nodeSize = pNode->pParent ? arrayNodeSize : headSize;
662 assert(pNode == m_set->head());
663 assert(idx == endIdx);
// Iterator factories used by derived (non-intrusive) containers.
// size_t(0) - 1 is the before-begin sentinel index (wraps to SIZE_MAX);
// the trailing bool argument selects the non-traversing end-style constructor.
674 template <class Iterator>
675 Iterator init_begin() const
677 return Iterator(*this, head(), size_t(0) - 1);
680 template <class Iterator>
681 Iterator init_end() const
683 return Iterator(*this, head(), head_size(), false);
686 template <class Iterator>
687 Iterator init_rbegin() const
689 return Iterator(*this, head(), head_size());
692 template <class Iterator>
693 Iterator init_rend() const
695 return Iterator(*this, head(), size_t(0) - 1, false);
698 /// Bidirectional iterator class
// Thin typed facade over iterator_base; IsConst selects const or mutable access.
699 template <bool IsConst>
700 class bidirectional_iterator : protected iterator_base
702 friend class FeldmanHashSet;
705 static CDS_CONSTEXPR bool const c_bConstantIterator = IsConst;
708 typedef typename std::conditional< IsConst, value_type const*, value_type*>::type value_ptr; ///< Value pointer
709 typedef typename std::conditional< IsConst, value_type const&, value_type&>::type value_ref; ///< Value reference
712 bidirectional_iterator() CDS_NOEXCEPT
715 bidirectional_iterator(bidirectional_iterator const& rhs) CDS_NOEXCEPT
719 bidirectional_iterator& operator=(bidirectional_iterator const& rhs) CDS_NOEXCEPT
721 iterator_base::operator=(rhs);
725 bidirectional_iterator& operator++()
727 iterator_base::operator++();
731 bidirectional_iterator& operator--()
733 iterator_base::operator--();
// Dereference delegates to iterator_base::pointer(), which asserts the RCU lock is held.
737 value_ptr operator ->() const CDS_NOEXCEPT
739 return iterator_base::pointer();
742 value_ref operator *() const CDS_NOEXCEPT
744 value_ptr p = iterator_base::pointer();
// Cross-constness comparison: iterator may be compared with const_iterator.
749 template <bool IsConst2>
750 bool operator ==(bidirectional_iterator<IsConst2> const& rhs) const CDS_NOEXCEPT
752 return iterator_base::operator==(rhs);
755 template <bool IsConst2>
756 bool operator !=(bidirectional_iterator<IsConst2> const& rhs) const CDS_NOEXCEPT
758 return !(*this == rhs);
762 bidirectional_iterator(FeldmanHashSet& set, array_node * pNode, size_t idx, bool)
763 : iterator_base(set, pNode, idx, false)
766 bidirectional_iterator(FeldmanHashSet& set, array_node * pNode, size_t idx)
767 : iterator_base(set, pNode, idx)
771 /// Reverse bidirectional iterator
// Same facade as bidirectional_iterator but with ++/-- swapped to walk in reverse.
772 template <bool IsConst>
773 class reverse_bidirectional_iterator : public iterator_base
775 friend class FeldmanHashSet;
778 typedef typename std::conditional< IsConst, value_type const*, value_type*>::type value_ptr; ///< Value pointer
779 typedef typename std::conditional< IsConst, value_type const&, value_type&>::type value_ref; ///< Value reference
782 reverse_bidirectional_iterator() CDS_NOEXCEPT
786 reverse_bidirectional_iterator(reverse_bidirectional_iterator const& rhs) CDS_NOEXCEPT
790 reverse_bidirectional_iterator& operator=(reverse_bidirectional_iterator const& rhs) CDS_NOEXCEPT
792 iterator_base::operator=(rhs);
// Reverse semantics: ++ moves backward through the base, -- moves forward.
796 reverse_bidirectional_iterator& operator++()
798 iterator_base::operator--();
802 reverse_bidirectional_iterator& operator--()
804 iterator_base::operator++();
808 value_ptr operator ->() const CDS_NOEXCEPT
810 return iterator_base::pointer();
813 value_ref operator *() const CDS_NOEXCEPT
815 value_ptr p = iterator_base::pointer();
820 template <bool IsConst2>
821 bool operator ==(reverse_bidirectional_iterator<IsConst2> const& rhs) const
823 return iterator_base::operator==(rhs);
826 template <bool IsConst2>
827 bool operator !=(reverse_bidirectional_iterator<IsConst2> const& rhs)
829 return !(*this == rhs);
// Both protected ctors use the non-traversing base ctor; the position-only overload
// then steps backward() once to land on the first data slot in reverse order.
833 reverse_bidirectional_iterator(FeldmanHashSet& set, array_node * pNode, size_t idx, bool)
834 : iterator_base(set, pNode, idx, false)
837 reverse_bidirectional_iterator(FeldmanHashSet& set, array_node * pNode, size_t idx)
838 : iterator_base(set, pNode, idx, false)
840 iterator_base::backward();
846 #ifdef CDS_DOXYGEN_INVOKED
847 typedef implementation_defined iterator; ///< @ref cds_intrusive_FeldmanHashSet_rcu_iterators "bidirectional iterator" type
848 typedef implementation_defined const_iterator; ///< @ref cds_intrusive_FeldmanHashSet_rcu_iterators "bidirectional const iterator" type
849 typedef implementation_defined reverse_iterator; ///< @ref cds_intrusive_FeldmanHashSet_rcu_iterators "bidirectional reverse iterator" type
850 typedef implementation_defined const_reverse_iterator; ///< @ref cds_intrusive_FeldmanHashSet_rcu_iterators "bidirectional reverse const iterator" type
852 typedef bidirectional_iterator<false> iterator;
853 typedef bidirectional_iterator<true> const_iterator;
854 typedef reverse_bidirectional_iterator<false> reverse_iterator;
855 typedef reverse_bidirectional_iterator<true> const_reverse_iterator;
858 ///@name Thread-safe iterators
859 /** @anchor cds_intrusive_FeldmanHashSet_rcu_iterators
860 The set supports thread-safe iterators: you may iterate over the set in multi-threaded environment
861 under explicit RCU lock.
862 RCU lock requirement means that inserting or searching is allowed but you must not erase the items from the set
863 since erasing under RCU lock can lead to a deadlock. However, another thread can call \p erase() safely
864 while your thread is iterating.
866 A typical example is:
871 uint32_t payload; // only for example
873 struct set_traits: cds::intrusive::feldman_hashset::traits
875 struct hash_accessor {
876 uint32_t operator()( foo const& src ) const
883 typedef cds::urcu::gc< cds::urcu::general_buffered<>> rcu;
884 typedef cds::intrusive::FeldmanHashSet< rcu, foo, set_traits > set_type;
890 // iterate over the set
893 typename set_type::rcu_lock l; // scoped RCU lock
896 for ( auto i = s.begin(); i != s.end(); ++i ) {
897 // deal with i. Remember, erasing is prohibited here!
900 } // at this point RCU lock is released
903 Each iterator object supports the common interface:
904 - dereference operators:
906 value_type [const] * operator ->() noexcept
907 value_type [const] & operator *() noexcept
909 - pre-increment and pre-decrement. Post-operators is not supported
910 - equality operators <tt>==</tt> and <tt>!=</tt>.
911 Iterators are equal iff they point to the same cell of the same array node.
912 Note that for two iterators \p it1 and \p it2 the condition <tt> it1 == it2 </tt>
913 does not entail <tt> &(*it1) == &(*it2) </tt>: welcome to concurrent containers
915 @note It is possible the item can be iterated more that once, for example, if an iterator points to the item
916 in an array node that is being splitted.
// All begin-style accessors start at the sentinel index size_t(0) - 1 of the head
// array; end-style accessors use head_size() (or the sentinel, for rend) with the
// bool tag selecting the non-traversing iterator constructor.
920 /// Returns an iterator to the beginning of the set
923 return iterator(*this, head(), size_t(0) - 1);
926 /// Returns an const iterator to the beginning of the set
927 const_iterator begin() const
929 return const_iterator(*this, head(), size_t(0) - 1);
932 /// Returns an const iterator to the beginning of the set
933 const_iterator cbegin()
935 return const_iterator(*this, head(), size_t(0) - 1);
938 /// Returns an iterator to the element following the last element of the set. This element acts as a placeholder; attempting to access it results in undefined behavior.
941 return iterator(*this, head(), head_size(), false);
944 /// Returns a const iterator to the element following the last element of the set. This element acts as a placeholder; attempting to access it results in undefined behavior.
945 const_iterator end() const
947 return const_iterator(*this, head(), head_size(), false);
950 /// Returns a const iterator to the element following the last element of the set. This element acts as a placeholder; attempting to access it results in undefined behavior.
951 const_iterator cend()
953 return const_iterator(*this, head(), head_size(), false);
956 /// Returns a reverse iterator to the first element of the reversed set
957 reverse_iterator rbegin()
959 return reverse_iterator(*this, head(), head_size());
962 /// Returns a const reverse iterator to the first element of the reversed set
963 const_reverse_iterator rbegin() const
965 return const_reverse_iterator(*this, head(), head_size());
968 /// Returns a const reverse iterator to the first element of the reversed set
969 const_reverse_iterator crbegin()
971 return const_reverse_iterator(*this, head(), head_size());
974 /// Returns a reverse iterator to the element following the last element of the reversed set
976 It corresponds to the element preceding the first element of the non-reversed container.
977 This element acts as a placeholder, attempting to access it results in undefined behavior.
979 reverse_iterator rend()
981 return reverse_iterator(*this, head(), size_t(0) - 1, false);
984 /// Returns a const reverse iterator to the element following the last element of the reversed set
986 It corresponds to the element preceding the first element of the non-reversed container.
987 This element acts as a placeholder, attempting to access it results in undefined behavior.
989 const_reverse_iterator rend() const
991 return const_reverse_iterator(*this, head(), size_t(0) - 1, false);
994 /// Returns a const reverse iterator to the element following the last element of the reversed set
996 It corresponds to the element preceding the first element of the non-reversed container.
997 This element acts as a placeholder, attempting to access it results in undefined behavior.
999 const_reverse_iterator crend()
1001 return const_reverse_iterator(*this, head(), size_t(0) - 1, false);
// Core insert-or-replace loop shared by update() and derived containers.
// Returns {success, inserted-new}; f(new, old) is called when an existing item is replaced.
1007 template <typename Func>
1008 std::pair<bool, bool> do_update(value_type& val, Func f, bool bInsert = true)
1010 hash_type const& hash = hash_accessor()(val);
1011 traverse_data pos( hash, *this );
1012 hash_comparator cmp;
1016 node_ptr slot = base_class::traverse( pos );
1017 assert(slot.bits() == 0);
// Acquire re-check that the slot still holds what traverse() observed.
1023 if ( pos.pArr->nodes[pos.nSlot].load(memory_model::memory_order_acquire) == slot) {
1025 if ( cmp( hash, hash_accessor()(*slot.ptr())) == 0 ) {
1026 // the item with that hash value already exists
1027 // Replace it with val
// Self-replacement is a no-op: report "updated existing" without touching the slot.
1028 if ( slot.ptr() == &val ) {
1029 stats().onUpdateExisting();
1030 return std::make_pair(true, false);
1033 if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong(slot, node_ptr(&val), memory_model::memory_order_release, atomics::memory_order_relaxed)) {
1034 // slot can be disposed
1035 f( val, slot.ptr());
1037 stats().onUpdateExisting();
// goto defers retire_ptr until after the RCU lock scope ends (see below).
1038 goto update_existing_done;
1041 stats().onUpdateRetry();
1045 // the slot must be expanded
1046 base_class::expand_slot( pos, slot );
// bInsert == false and hash absent: leave the set unchanged.
1049 stats().onUpdateFailed();
1050 return std::make_pair(false, false);
1055 // the slot is empty, try to insert data node
1058 if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong(pNull, node_ptr(&val), memory_model::memory_order_release, atomics::memory_order_relaxed))
1060 // the new data node has been inserted
1063 stats().onUpdateNew();
1064 stats().height( pos.nHeight );
1065 return std::make_pair(true, true);
1069 stats().onUpdateFailed();
1070 return std::make_pair(false, false);
1073 // insert failed - slot has been changed by another thread
1075 stats().onUpdateRetry();
1079 stats().onSlotChanged();
1084 // retire_ptr must be called only outside of RCU lock
1085 update_existing_done:
1087 gc::template retire_ptr<disposer>(pOld);
1088 return std::make_pair(true, false);
// Core unlink loop shared by erase()/unlink()/extract(). The caller must hold the
// RCU lock; the unlinked node is returned (not retired here) so the caller controls disposal.
1091 template <typename Predicate>
1092 value_type * do_erase( hash_type const& hash, Predicate pred)
1094 assert(gc::is_locked());
1095 traverse_data pos( hash, *this );
1096 hash_comparator cmp;
1099 node_ptr slot = base_class::traverse( pos );
1100 assert( slot.bits() == 0 );
1102 if ( pos.pArr->nodes[pos.nSlot].load( memory_model::memory_order_acquire ) == slot ) {
// pred() lets callers refine matching beyond the hash (e.g. unlink() matches by address).
1104 if ( cmp( hash, hash_accessor()(*slot.ptr())) == 0 && pred( *slot.ptr())) {
1105 // item found - replace it with nullptr
1106 if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong( slot, node_ptr( nullptr ), memory_model::memory_order_acquire, atomics::memory_order_relaxed )) {
1108 stats().onEraseSuccess();
// CAS lost to a concurrent writer: retry the traversal.
1112 stats().onEraseRetry();
1115 stats().onEraseFailed();
1119 // the slot is empty
1120 stats().onEraseFailed();
1125 stats().onSlotChanged();
// Lookup helper shared by find()/get(). Caller must hold the RCU lock;
// returned pointer is valid only while the lock is held.
1129 value_type * search(hash_type const& hash )
1131 assert( gc::is_locked());
1132 traverse_data pos( hash, *this );
1133 hash_comparator cmp;
1136 node_ptr slot = base_class::traverse( pos );
1137 assert( slot.bits() == 0 );
// The slot must still hold the value traverse() returned, otherwise retry.
1139 if ( pos.pArr->nodes[pos.nSlot].load( memory_model::memory_order_acquire ) != slot ) {
1140 // slot value has been changed - retry
1141 stats().onSlotChanged();
// Hit only when the slot is non-empty and the stored hash matches exactly.
1143 else if ( slot.ptr() && cmp( hash, hash_accessor()(*slot.ptr())) == 0 ) {
1145 stats().onFindSuccess();
1148 stats().onFindFailed();
// Recursively empties one array node: descends into child array nodes, waits out
// in-progress slot conversions, and CASes each data slot to null before retiring the node.
// Not atomic overall - concurrent inserts may repopulate slots already cleared.
1157 void clear_array(array_node * pArrNode, size_t nSize)
1162 for (atomic_node_ptr * pArr = pArrNode->nodes, *pLast = pArr + nSize; pArr != pLast; ++pArr) {
1164 node_ptr slot = pArr->load(memory_model::memory_order_acquire);
1165 if (slot.bits() == base_class::flag_array_node ) {
1166 // array node, go down the tree
1167 assert(slot.ptr() != nullptr);
1168 clear_array(to_array(slot.ptr()), array_node_size());
1171 else if (slot.bits() == base_class::flag_array_converting ) {
1172 // the slot is converting to array node right now
// Spin until the conversion completes, then recurse into the new array node.
1173 while ((slot = pArr->load(memory_model::memory_order_acquire)).bits() == base_class::flag_array_converting ) {
1175 stats().onSlotConverting();
1179 assert(slot.ptr() != nullptr);
1180 assert(slot.bits() == base_class::flag_array_node );
1181 clear_array(to_array(slot.ptr()), array_node_size());
// Data slot: unlink via CAS and hand the node to the RCU disposer.
1186 if (pArr->compare_exchange_strong(slot, node_ptr(), memory_model::memory_order_acquire, atomics::memory_order_relaxed)) {
1188 gc::template retire_ptr<disposer>(slot.ptr());
1190 stats().onEraseSuccess();
1201 }} // namespace cds::intrusive
1203 #endif // #ifndef CDSLIB_INTRUSIVE_FELDMAN_HASHSET_RCU_H