/* Shorthand aliases for the C11 <stdatomic.h> memory-order constants used
 * throughout this file. */
7 #define relaxed memory_order_relaxed
8 #define release memory_order_release
9 #define acquire memory_order_acquire
/* Per-thread spare-node index, indexed by get_thread_num(): each thread
 * owns exactly one private queue-node slot to enqueue from / reclaim into. */
11 static unsigned int *node_nums;
/* Return the calling thread's pre-assigned spare node index.
 * NOTE(review): each thread holds a single spare slot, so a thread
 * presumably must reclaim() a node before calling new_node() again —
 * verify against callers. */
13 static unsigned int new_node()
15 return node_nums[get_thread_num()];
/* Hand ownership of `node` back to the calling thread's spare slot so a
 * later new_node() by this thread reuses it. */
18 static void reclaim(unsigned int node)
20 node_nums[get_thread_num()] = node;
/* Initialize the queue for `num_threads` threads.
 * Node index 0 serves as NULL; node 1 is the initial dummy node, so head
 * and tail both start as (1, 0); nodes 2..MAX_NODES form the free list. */
23 void init_queue(queue_t *q, int num_threads)
/* NOTE(review): malloc result is not checked before use — acceptable for a
 * model-checker benchmark, but flagging it. */
30 node_nums = malloc(num_threads * sizeof(*node_nums));
31 for (i = 0; i < num_threads; i++)
34 /* Note: needed to add this init manually */
35 atomic_init(&q->nodes[0].next, 0);
37 /* initialize queue */
38 head = MAKE_POINTER(1, 0);
39 tail = MAKE_POINTER(1, 0);
40 next = MAKE_POINTER(0, 0); // (NULL, 0)
42 atomic_init(&q->head, head);
43 atomic_init(&q->tail, tail);
44 atomic_init(&q->nodes[1].next, next);
46 /* initialize avail list */
47 for (i = 2; i < MAX_NODES; i++) {
48 next = MAKE_POINTER(i + 1, 0);
49 atomic_init(&q->nodes[i].next, next);
/* Last free node terminates the avail list with (NULL, 0).
 * NOTE(review): indexing nodes[MAX_NODES] requires the nodes array to be
 * declared with MAX_NODES + 1 elements — confirm in the header. */
52 next = MAKE_POINTER(0, 0); // (NULL, 0)
53 atomic_init(&q->nodes[MAX_NODES].next, next);
/* Enqueue `val`: write it into this thread's spare node, then link that
 * node after the current tail (CAS on tail->next) and swing q->tail
 * forward — helping a lagging concurrent enqueuer if tail->next is
 * already non-NULL.
 *
 * BUGFIX: every compare-exchange below previously passed
 * memory_order_acq_rel as the *failure* ordering.  C11 7.17.7.4 forbids
 * memory_order_release / memory_order_acq_rel on failure (a failed CAS
 * performs no store); the failure order is now memory_order_acquire. */
56 void enqueue(queue_t *q, unsigned int val)
65 store_32(&q->nodes[node].value, val);
66 tmp = atomic_load_explicit(&q->nodes[node].next, relaxed);
67 set_ptr(&tmp, 0); // NULL
68 atomic_store_explicit(&q->nodes[node].next, tmp, relaxed);
71 tail = atomic_load_explicit(&q->tail, acquire);
72 next = atomic_load_explicit(&q->nodes[get_ptr(tail)].next, acquire);
/* Re-read tail to check it has not moved since we read its next field. */
73 if (tail == atomic_load_explicit(&q->tail, relaxed)) {
74 if (get_ptr(next) == 0) { // == NULL
/* Tail really is last: try to link our node in, bumping the ABA count. */
75 pointer value = MAKE_POINTER(node, get_count(next) + 1);
76 success = atomic_compare_exchange_strong_explicit(&q->nodes[get_ptr(tail)].next,
77 &next, value, memory_order_acq_rel, memory_order_acquire);
/* Tail is lagging: help swing q->tail to the node after it. */
80 unsigned int ptr = get_ptr(atomic_load_explicit(&q->nodes[get_ptr(tail)].next, memory_order_seq_cst));
81 pointer value = MAKE_POINTER(ptr,
83 atomic_compare_exchange_strong_explicit(&q->tail,
85 memory_order_acq_rel, memory_order_acquire);
/* Linked successfully: try to advance q->tail to our node (best-effort;
 * another thread may help, so the CAS result is deliberately ignored). */
90 atomic_compare_exchange_strong_explicit(&q->tail,
92 MAKE_POINTER(node, get_count(tail) + 1),
93 memory_order_acq_rel, memory_order_acquire);
/* Dequeue: read the value from the node after the dummy head, then swing
 * q->head past it and reclaim the old dummy into this thread's spare slot.
 * When head == tail the queue is either empty (next == NULL) or tail is
 * lagging and we help advance it.
 *
 * BUGFIX: the compare-exchange calls below previously passed
 * memory_order_acq_rel as the *failure* ordering, which C11 7.17.7.4
 * forbids (a failed CAS performs no store); the failure order is now
 * memory_order_acquire. */
96 unsigned int dequeue(queue_t *q)
105 head = atomic_load_explicit(&q->head, acquire);
106 tail = atomic_load_explicit(&q->tail, acquire);
107 next = atomic_load_explicit(&q->nodes[get_ptr(head)].next, acquire);
/* Snapshot consistency check: head must not have moved since we read it. */
108 if (atomic_load_explicit(&q->head, relaxed) == head) {
109 if (get_ptr(head) == get_ptr(tail)) {
110 if (get_ptr(next) == 0) { // NULL
/* head == tail but next != NULL: tail is lagging — help advance it. */
113 atomic_compare_exchange_strong_explicit(&q->tail,
115 MAKE_POINTER(get_ptr(next), get_count(tail) + 1),
116 memory_order_acq_rel, memory_order_acquire);
/* Read the value BEFORE the CAS: after q->head moves, another thread may
 * reclaim and overwrite this node. */
119 value = load_32(&q->nodes[get_ptr(next)].value);
120 success = atomic_compare_exchange_strong_explicit(&q->head,
122 MAKE_POINTER(get_ptr(next), get_count(head) + 1),
123 memory_order_acq_rel, memory_order_acquire);
/* Old dummy node is ours now; recycle it into this thread's spare slot. */
129 reclaim(get_ptr(head));