Commit state of repository at time of OOPSLA 2015 submission.
satcheck.git: test/reference/ms-queue-atomics.c
#include "threads.h"
#include <stdlib.h>
#include <stdbool.h>    /* for the bool returned by dequeue() */
// #include "librace.h"
// #include "model-assert.h"

#include "ms-queue.h"
#include "libinterface.h"

#define relaxed memory_order_relaxed
#define release memory_order_release
#define acquire memory_order_acquire

#define MAX_FREELIST 4 /* Each thread can own up to MAX_FREELIST free nodes */
#define INITIAL_FREE 2 /* Each thread starts with INITIAL_FREE free nodes */

#define POISON_IDX 0x666

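/*
 * Nodes live in the fixed-size q->nodes array (see ms-queue.h); instead of
 * malloc/free, each thread hands node indexes out of (and back into) its
 * own row of free_lists, so "allocation" never leaves preallocated storage.
 */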
static unsigned int (*free_lists)[MAX_FREELIST];

/* Search this thread's free list for a "new" node */
static unsigned int new_node()
{
        int i;
        int t = get_thread_num();
        for (i = 0; i < MAX_FREELIST; i++) {
                unsigned int node = load_32(&free_lists[t][i]);
                if (node) {
                        store_32(&free_lists[t][i], 0);
                        return node;
                }
        }
        /* free_list is empty? */
        // MODEL_ASSERT(0);
        return 0;
}

/* Place this node index back on this thread's free list */
static void reclaim(unsigned int node)
{
        int i;
        int t = get_thread_num();

        /* Don't reclaim NULL node */
        // MODEL_ASSERT(node);

        for (i = 0; i < MAX_FREELIST; i++) {
                /* Should never race with our own thread here */
                unsigned int idx = load_32(&free_lists[t][i]);

                /* Found empty spot in free list */
                if (idx == 0) {
                        store_32(&free_lists[t][i], node);
                        return;
                }
        }
        /* free list is full? */
        // MODEL_ASSERT(0);
}

void init_queue(queue_t *q, int num_threads)
{
        int i, j;

        /* Initialize each thread's free list with INITIAL_FREE pointers */
        /* The actual nodes are initialized with poison indexes */
        free_lists = calloc(num_threads, sizeof(*free_lists)); /* zeroed: slots past INITIAL_FREE must read as empty */
        for (i = 0; i < num_threads; i++) {
                for (j = 0; j < INITIAL_FREE; j++) {
                        free_lists[i][j] = 2 + i * MAX_FREELIST + j;
                        atomic_init(&q->nodes[free_lists[i][j]].next, MAKE_POINTER(POISON_IDX, 0));
                }
        }

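        /* Index 0 serves as NULL; node 1 is the initial dummy node that
           both head and tail point at, with a NULL next pointer. */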
        /* initialize queue */
        atomic_init(&q->head, MAKE_POINTER(1, 0));
        atomic_init(&q->tail, MAKE_POINTER(1, 0));
        atomic_init(&q->nodes[1].next, MAKE_POINTER(0, 0));
}

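/*
 * A 'pointer' packs a node index and an update count into one word: the
 * PTR_MASK bits select a slot in q->nodes, and the COUNT_MASK bits
 * (shifted down by 32) hold a version counter that is bumped on every
 * update so a recycled index cannot be mistaken for the old value
 * (the ABA problem).
 */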
void enqueue(queue_t *q, unsigned int val)
{
        int success = 0;
        unsigned int node;
        pointer tail;
        pointer next;
        pointer tmp;

        node = new_node();
        store_32(&q->nodes[node].value, val);
        tmp = atomic_load_explicit(&q->nodes[node].next, relaxed);
        // manually inlined macro: set_ptr(&tmp, 0); // NULL
        tmp = (tmp & ~PTR_MASK);
        atomic_store_explicit(&q->nodes[node].next, tmp, relaxed);

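        /* Retry loop: snapshot tail and its next pointer, validate the
           snapshot, then try to link the new node after the last node. */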
        while (!success) {
                pointer_t * qt = &q->tail;
                tail = atomic_load_explicit(qt, acquire);
                pointer_t * qn = &q->nodes[tail & PTR_MASK].next;
                next = atomic_load_explicit(qn, acquire);
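                /* Only proceed if tail and next came from a consistent
                   snapshot, i.e. tail has not moved since we read it. */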
                if (tail == atomic_load_explicit(&q->tail, relaxed)) {

                        /* Check for uninitialized 'next' */
                        // MODEL_ASSERT(get_ptr(next) != POISON_IDX);

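                        /* Tail points at the last node: try to link the new
                           node in as its successor. */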
                        if ((next & PTR_MASK) == 0) { // == NULL
                                pointer value = MAKE_POINTER(node, ((next & COUNT_MASK) >> 32) + 1);
                                /* failure order is relaxed: C11 disallows release there */
                                success = atomic_compare_exchange_strong_explicit(&q->nodes[tail & PTR_MASK].next,
                                                &next, value, release, relaxed);
                        }
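                        /* CAS failed or next was non-NULL: tail is lagging
                           behind the real last node, so help swing it forward. */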
                        if (!success) {
                                unsigned int ptr = (atomic_load_explicit(&q->nodes[tail & PTR_MASK].next, acquire) & PTR_MASK);
                                pointer value = MAKE_POINTER(ptr,
                                             ((tail & COUNT_MASK) >> 32) + 1);
                                atomic_compare_exchange_strong_explicit(&q->tail,
                                                &tail, value,
                                                release, relaxed);
                                thrd_yield();
                        }
                }
        }
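        /* The node is linked; try to swing tail onto it. A failed CAS is
           fine: some other thread has already advanced tail past us. */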
        atomic_compare_exchange_strong_explicit(&q->tail,
                                            &tail,
                                            MAKE_POINTER(node, ((tail & COUNT_MASK) >> 32) + 1),
                                            release, relaxed);
}

bool dequeue(queue_t *q, unsigned int *retVal)
{
        int success = 0;
        pointer head;
        pointer tail;
        pointer next;

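        /* Retry loop: snapshot head, tail, and head->next, validate the
           snapshot, then report empty, help a lagging tail, or swing head
           past the current dummy node. */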
        while (!success) {
                head = atomic_load_explicit(&q->head, acquire);
                tail = atomic_load_explicit(&q->tail, relaxed);
                next = atomic_load_explicit(&q->nodes[(head & PTR_MASK)].next, acquire);
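                /* Retry unless the three loads above saw a consistent state. */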
                if (atomic_load_explicit(&q->head, relaxed) == head) {
                        if ((head & PTR_MASK) == (tail & PTR_MASK)) {
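                                /* head == tail: the queue is either empty
                                   (next is NULL) or tail is lagging. */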

                                /* Check for uninitialized 'next' */
                                // MODEL_ASSERT(get_ptr(next) != POISON_IDX);

                                if (get_ptr(next) == 0) { // NULL
                                        return false; // NULL
                                }
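                                /* Tail is lagging: help advance it, then retry. */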
                                atomic_compare_exchange_strong_explicit(&q->tail,
                                                &tail,
                                                MAKE_POINTER((next & PTR_MASK), ((tail & COUNT_MASK) >> 32) + 1),
                                                release, relaxed);
                                thrd_yield();
                        } else {
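                                /* Read the value before the CAS: once head
                                   advances, this node can be reclaimed and
                                   reused by another thread. */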
                                *retVal = load_32(&q->nodes[(next & PTR_MASK)].value);
                                success = atomic_compare_exchange_strong_explicit(&q->head,
                                                &head,
                                                MAKE_POINTER((next & PTR_MASK), ((head & COUNT_MASK) >> 32) + 1),
                                                release, relaxed);
                                if (!success)
                                        thrd_yield();
                        }
                }
        }
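        /* The old head (the previous dummy node) is now unreachable;
           return its index to this thread's free list. */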
        reclaim((head & PTR_MASK));
        return true;
}
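
/*
 * Usage sketch (not part of the original file): a minimal two-thread driver.
 * The thrd_* signatures and the user_main entry point are assumptions about
 * the bundled threads.h harness; adjust them to the actual test driver.
 *
 * static queue_t *queue;
 *
 * static void worker(void *arg)
 * {
 *         unsigned int val;
 *         enqueue(queue, 42);
 *         if (dequeue(queue, &val)) {
 *                 // val holds some previously enqueued value
 *         }
 * }
 *
 * int user_main(int argc, char **argv)
 * {
 *         thrd_t t1, t2;
 *         queue = calloc(1, sizeof(*queue));
 *         init_queue(queue, 2);
 *         thrd_create(&t1, worker, NULL);
 *         thrd_create(&t2, worker, NULL);
 *         thrd_join(t1);
 *         thrd_join(t2);
 *         free(queue);
 *         return 0;
 * }
 */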