2 * Copyright 2017 Facebook, Inc.
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
8 * http://www.apache.org/licenses/LICENSE-2.0
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
18 #include <folly/experimental/hazptr/debug.h>
19 #include <folly/experimental/hazptr/hazptr.h>
24 /** Set implemented as an ordered singly-linked list.
26 * A single writer thread may add or remove elements. Multiple reader
27 * threads may search the set concurrently with each other and with
28 * the writer's operations.
// Custom deleter passed to hazptr_obj_base: invoked once a retired Node is
// no longer protected by any reader's hazard pointer.
// NOTE(review): this chunk is elided — the enclosing `struct Reclaimer`
// header (orig. line 33) and the actual reclaim action (presumably
// `delete p;`, orig. line 36) are not visible here; confirm against the
// full file.
32 template <typename Node>
34 void operator()(Node* p) {
// Trace reclamation of the node (DEBUG_PRINT comes from hazptr/debug.h).
35 DEBUG_PRINT(p << " " << sizeof(Node));
// List node. Deriving from hazptr_obj_base<Node, Reclaimer<Node>> makes the
// node retirable through the hazard-pointer library with Reclaimer as its
// deleter, so it is freed only after no reader still protects it.
// NOTE(review): member declarations between orig. lines 40 and 46 (e.g. the
// `elem_` field initialized below and any access specifiers) are elided from
// this view — confirm against the full file.
40 class Node : public hazptr_obj_base<Node, Reclaimer<Node>> {
// Link to the successor; atomic because readers traverse concurrently with
// the single writer's updates.
43 std::atomic<Node*> next_;
45 Node(T e, Node* n) : elem_(e), next_(n) {
// Trace construction of the node with its element and successor.
46 DEBUG_PRINT(this << " " << e << " " << n);
55 std::atomic<Node*> head_ = {nullptr};
57 /* Used by the single writer */
// Advance `prev` (in/out) to the link whose target is the first node with
// elem_ >= v, i.e. the lower bound of v. Relaxed loads suffice because only
// the single writer thread calls this (per the comment above), so there is
// no concurrent modification to synchronize with here.
58 void locate_lower_bound(const T& v, std::atomic<Node*>*& prev) const {
59 auto curr = prev->load(std::memory_order_relaxed);
// NOTE(review): the loop header (orig. line 60, presumably `while (curr) {`)
// is elided from this view — the `break` below belongs to it; confirm
// against the full file.
// Stop at the first element not less than v (list is kept sorted).
61 if (curr->elem_ >= v) break;
62 prev = &(curr->next_);
63 curr = curr->next_.load(std::memory_order_relaxed);
// Walk the whole list from head_, saving each node's successor before the
// node is reclaimed.
// NOTE(review): surrounding lines are elided — the enclosing destructor
// header, the declaration of `next`, and the per-node deletion (orig. lines
// 73+) are not visible here; confirm against the full file.
71 for (auto p = head_.load(); p; p = next) {
72 next = p->next_.load();
// Body of the writer-only add: insert v in sorted position, rejecting
// duplicates.
// NOTE(review): the enclosing signature (orig. lines 77-78, presumably
// `bool add(T v)` with `auto prev = &head_;`) and the `return true;` tail
// are elided from this view — confirm against the full file.
79 locate_lower_bound(v, prev);
// Relaxed load is fine: only the single writer mutates the list.
80 auto curr = prev->load(std::memory_order_relaxed);
// Already present — set semantics, no duplicate inserted.
81 if (curr && curr->elem_ == v) return false;
// Publish the new node by storing it into the predecessor link. The default
// seq_cst store orders the node's construction before it becomes visible to
// readers.
82 prev->store(new Node(std::move(v), curr));
// Writer-only removal of v. Returns false if v is not in the set.
// NOTE(review): orig. line 87 (presumably `auto prev = &head_;`) and the
// tail (orig. lines 96+, presumably `curr->retire(); return true; }` to hand
// the node to hazard-pointer reclamation) are elided from this view —
// confirm against the full file.
86 bool remove(const T& v) {
88 locate_lower_bound(v, prev);
// Relaxed load is fine: only the single writer mutates the list.
89 auto curr = prev->load(std::memory_order_relaxed);
// Lower bound is either past the end or a larger element: v is absent.
90 if (!curr || curr->elem_ != v) return false;
91 Node *curr_next = curr->next_.load();
92 // Patch up the actual list...
// Release store: readers that acquire-load this link see the successor's
// fully-constructed state.
93 prev->store(curr_next, std::memory_order_release);
94 // ...and only then null out the removed node.
// Ordering matters: unlink first, then clear next_, so a reader still
// holding curr via a hazard pointer never sees the rest of the list cut off
// before curr itself is unreachable.
95 curr->next_.store(nullptr, std::memory_order_release);
100 /* Used by readers */
// Reader-side membership test. Safe to run concurrently with the single
// writer and with other readers: each node is protected by a hazard pointer
// before it is dereferenced, using the classic two-pointer hand-over-hand
// scheme.
// NOTE(review): several interior lines are elided from this view — the
// traversal loop header (orig. ~108), the retry jumps after the two
// validation failures (orig. 111, 114), and the `return true;` on a match
// (orig. 116). Confirm against the full file.
101 bool contains(const T& val) const {
102 /* Acquire two hazard pointers for hand-over-hand traversal. */
103 hazptr_holder hptr_prev;
104 hazptr_holder hptr_curr;
// Acquire-load pairs with the writer's release stores so the node's
// contents are visible once the pointer is.
107 auto curr = prev->load(std::memory_order_acquire);
// Reached the end without finding val.
109 if (!curr) { return false; }
// try_protect publishes curr as a hazard pointer and re-validates *prev;
// on failure the node may already be retired, so the traversal must retry
// (retry target elided from this view).
110 if (!hptr_curr.try_protect(curr, *prev))
112 auto next = curr->next_.load(std::memory_order_acquire);
// Validate that the link we came through still points at curr; if the
// writer changed it, next may be stale — retry (elided).
113 if (prev->load(std::memory_order_acquire) != curr)
115 if (curr->elem_ == val) {
117 } else if (!(curr->elem_ < val)) {
118 return false; // because the list is sorted
// Advance: curr's next_ link becomes the new prev.
120 prev = &(curr->next_);
122 /* Swap does not change the values of the owned hazard
123 * pointers themselves. After the swap, the hazard pointer
124 * owned by hptr_prev continues to protect the node that
125 * contains the pointer *prev. The hazard pointer owned by
126 * hptr_curr will continue to protect the node that contains
127 * the old *prev (unless the old prev was &head), which no
128 * longer needs protection, so hptr_curr's hazard pointer is
129 * now free to protect *curr in the next iteration (if curr !=
132 swap(hptr_curr, hptr_prev);
135 /* The hazard pointers are released automatically. */
140 } // namespace hazptr