/*
 * Copyright 2017 Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
18 #include <folly/experimental/hazptr/debug.h>
19 #include <folly/experimental/hazptr/hazptr.h>
/** Set implemented as an ordered singly-linked list.
 *
 *  A single writer thread may add or remove elements. Multiple reader
 *  threads may search the set concurrently with each other and with
 *  the writer's operations.
 */
/** Custom reclamation functor passed to hazptr_obj_base.
 *
 *  Invoked by the hazard-pointer domain once no hazard pointer can
 *  still be protecting the retired node; it is then safe to free it.
 */
template <typename Node>
struct Reclaimer {
  void operator()(Node* p) {
    DEBUG_PRINT(p << " " << sizeof(Node));
    // Safe to free: the domain guarantees no reader still holds p.
    delete p;
  }
};
40 class Node : public hazptr_obj_base<Node, Reclaimer<Node>> {
43 std::atomic<Node*> next_;
45 Node(T e, Node* n) : elem_(e), next_(n) {
46 DEBUG_PRINT(this << " " << e << " " << n);
55 std::atomic<Node*> head_ = {nullptr};
56 hazptr_domain& domain_;
58 /* Used by the single writer */
59 void locate_lower_bound(const T& v, std::atomic<Node*>*& prev) const {
60 auto curr = prev->load(std::memory_order_relaxed);
62 if (curr->elem_ >= v) break;
63 prev = &(curr->next_);
64 curr = curr->next_.load(std::memory_order_relaxed);
70 explicit SWMRListSet(hazptr_domain& domain = default_hazptr_domain())
75 for (auto p = head_.load(); p; p = next) {
76 next = p->next_.load();
83 locate_lower_bound(v, prev);
84 auto curr = prev->load(std::memory_order_relaxed);
85 if (curr && curr->elem_ == v) return false;
86 prev->store(new Node(std::move(v), curr));
90 bool remove(const T& v) {
92 locate_lower_bound(v, prev);
93 auto curr = prev->load(std::memory_order_relaxed);
94 if (!curr || curr->elem_ != v) return false;
95 Node *curr_next = curr->next_.load();
96 // Patch up the actual list...
97 prev->store(curr_next, std::memory_order_release);
98 // ...and only then null out the removed node.
99 curr->next_.store(nullptr, std::memory_order_release);
100 curr->retire(domain_);
103 /* Used by readers */
104 bool contains(const T& val) const {
105 /* Acquire two hazard pointers for hand-over-hand traversal. */
106 hazptr_holder hptr_prev(domain_);
107 hazptr_holder hptr_curr(domain_);
110 auto curr = prev->load(std::memory_order_acquire);
112 if (!curr) { return false; }
113 if (!hptr_curr.try_protect(curr, *prev))
115 auto next = curr->next_.load(std::memory_order_acquire);
116 if (prev->load(std::memory_order_acquire) != curr)
118 if (curr->elem_ == val) {
120 } else if (!(curr->elem_ < val)) {
121 return false; // because the list is sorted
123 prev = &(curr->next_);
125 /* Swap does not change the values of the owned hazard
126 * pointers themselves. After the swap, The hazard pointer
127 * owned by hptr_prev continues to protect the node that
128 * contains the pointer *prev. The hazard pointer owned by
129 * hptr_curr will continue to protect the node that contains
130 * the old *prev (unless the old prev was &head), which no
131 * longer needs protection, so hptr_curr's hazard pointer is
132 * now free to protect *curr in the next iteration (if curr !=
135 swap(hptr_curr, hptr_prev);
138 /* The hazard pointers are released automatically. */
} // namespace hazptr
} // namespace folly