The CDSSpec checker's benchmarks
[model-checker-benchmarks.git] / ms-queue-loose / queue.c
#include <threads.h>
#include <stdlib.h>
#include "librace.h"
#include "model-assert.h"

#include "queue.h"

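/*
 * Michael-Scott style non-blocking queue, annotated for the CDSSpec checker.
 * Nodes live in q->nodes[] and are referred to by index; the `pointer` type
 * (declared in queue.h, not shown here) appears to pack a node index together
 * with a modification count (get_ptr()/get_count()/MAKE_POINTER()), the usual
 * counter trick for avoiding ABA problems on compare-and-swap.
 */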
#define relaxed memory_order_relaxed
#define release memory_order_release
#define acquire memory_order_acquire

#define MAX_FREELIST 4 /* Each thread can own up to MAX_FREELIST free nodes */
#define INITIAL_FREE 2 /* Each thread starts with INITIAL_FREE free nodes */

#define POISON_IDX 0x666

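/* Per-thread free lists of node indexes; a slot holding 0 is an empty slot */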
static unsigned int (*free_lists)[MAX_FREELIST];

/* Search this thread's free list for a "new" node */
static unsigned int new_node()
{
        int i;
        int t = get_thread_num();
        for (i = 0; i < MAX_FREELIST; i++) {
                //unsigned int node = load_32(&free_lists[t][i]);
                unsigned int node = free_lists[t][i];
                if (node) {
                        //store_32(&free_lists[t][i], 0);
                        free_lists[t][i] = 0;
                        return node;
                }
        }
        /* free_list is empty? */
        MODEL_ASSERT(0);
        return 0;
}

/* Simulate the fact that when a node is recycled, it may be reassigned to the
 * same queue or reused for something else */
void simulateRecycledNodeUpdate(queue_t *q, unsigned int node) {
        atomic_store_explicit(&q->nodes[node].next, -1, memory_order_release);
}


/* Place this node index back on this thread's free list */
static void reclaim(unsigned int node)
{
        int i;
        int t = get_thread_num();

        /* Don't reclaim NULL node */
        //MODEL_ASSERT(node);

        for (i = 0; i < MAX_FREELIST; i++) {
                /* Should never race with our own thread here */
                //unsigned int idx = load_32(&free_lists[t][i]);
                unsigned int idx = free_lists[t][i];

                /* Found empty spot in free list */
                if (idx == 0) {
                        //store_32(&free_lists[t][i], node);
                        free_lists[t][i] = node;
                        return;
                }
        }
        /* free list is full? */
        //MODEL_ASSERT(0);
}

void init_queue(queue_t *q, int num_threads)
{
        int i, j;

        /* Initialize each thread's free list with INITIAL_FREE pointers */
        /* The actual nodes are initialized with poison indexes */
        free_lists = (unsigned int (*)[MAX_FREELIST]) malloc(num_threads * sizeof(*free_lists));
        for (i = 0; i < num_threads; i++) {
                for (j = 0; j < INITIAL_FREE; j++) {
                        free_lists[i][j] = 2 + i * MAX_FREELIST + j;
                        atomic_init(&q->nodes[free_lists[i][j]].next, MAKE_POINTER(POISON_IDX, 0));
                }
        }

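        /* Node index 0 serves as NULL and node 1 is the initial dummy node;
         * per-thread free nodes start at index 2 (see the loop above) */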
        /* initialize queue */
        atomic_init(&q->head, MAKE_POINTER(1, 0));
        atomic_init(&q->tail, MAKE_POINTER(1, 0));
        atomic_init(&q->nodes[1].next, MAKE_POINTER(0, 0));
}

/** @DeclareState: IntList *q;
@Commutativity: enqueue <-> dequeue (true)
@Commutativity: dequeue <-> dequeue (!M1->RET || !M2->RET) */
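/* The annotations above and the @Transition/@PreCondition blocks below are
 * CDSSpec specifications: as I read them, the abstract state is a list of
 * integers, enqueue always commutes with dequeue, and two dequeues commute
 * unless both of them succeed. */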

/** @Transition: STATE(q)->push_back(val); */
void enqueue(queue_t *q, unsigned int val, int n)
{
        int success = 0;
        unsigned int node;
        pointer tail;
        pointer next;
        pointer tmp;

        node = new_node();
        //store_32(&q->nodes[node].value, val);
        q->nodes[node].value = val;
        tmp = atomic_load_explicit(&q->nodes[node].next, relaxed);
        set_ptr(&tmp, 0); // NULL
        // This is a known bug found by AutoMO; testcase4 can reveal it
        atomic_store_explicit(&q->nodes[node].next, tmp, release);

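        /* Standard Michael-Scott enqueue loop: snapshot tail and tail->next,
         * re-check that tail has not moved, then either link the new node
         * after the last node or help swing tail forward and retry. Each
         * successful CAS bumps the packed count to guard against ABA. */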
        while (!success) {
                /**********    Detected UL    **********/
                tail = atomic_load_explicit(&q->tail, acquire);
                /**********    Detected Admissibility (testcase4)    **********/
                next = atomic_load_explicit(&q->nodes[get_ptr(tail)].next, acquire);
                if (tail == atomic_load_explicit(&q->tail, relaxed)) {

                        /* Check for uninitialized 'next' */
                        //MODEL_ASSERT(get_ptr(next) != POISON_IDX);

                        if (get_ptr(next) == 0) { // == NULL
                                pointer value = MAKE_POINTER(node, get_count(next) + 1);
                                /**********    Detected Correctness (testcase1)    **********/
                                success = atomic_compare_exchange_strong_explicit(&q->nodes[get_ptr(tail)].next,
                                                &next, value, release, release);
                                /** @OPClearDefine: success */
                        }
                        if (!success) {
                                /**********    Detected UL    **********/
                                unsigned int ptr = get_ptr(atomic_load_explicit(&q->nodes[get_ptr(tail)].next, acquire));
                                pointer value = MAKE_POINTER(ptr,
                                                get_count(tail) + 1);
                                /**********    Detected Correctness (testcase2)    **********/
                                atomic_compare_exchange_strong_explicit(&q->tail,
                                                &tail, value,
                                                release, release);
                                thrd_yield();
                        }
                }
        }
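        /* Swing tail to the node we just linked in; a failed CAS here is
         * harmless, since it means another thread already advanced tail */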
        /**********    Detected Correctness (testcase1)    **********/
        atomic_compare_exchange_strong_explicit(&q->tail,
                        &tail,
                        MAKE_POINTER(node, get_count(tail) + 1),
                        release, release);
}

/** @Transition: if (RET) {
        if (STATE(q)->empty()) return false;
        STATE(q)->pop_front();
}
@PreCondition: return RET ? !STATE(q)->empty() && STATE(q)->front() == *retVal : true; */
bool dequeue(queue_t *q, unsigned int *retVal, unsigned int *reclaimNode)
{
        int success = 0;
        pointer head;
        pointer tail;
        pointer next;

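        /* Standard Michael-Scott dequeue loop: snapshot head, tail and
         * head->next; if head == tail the queue is either empty (next is NULL)
         * or tail lags behind and must be helped forward; otherwise read the
         * value out of the successor node before trying to CAS head ahead */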
        while (!success) {
                /**********    Detected Admissibility (testcase3)    **********/
                head = atomic_load_explicit(&q->head, acquire);
                /**********    Detected KNOWN BUG    **********/
                tail = atomic_load_explicit(&q->tail, acquire);
                /**********    Detected Correctness (testcase1)    **********/
                next = atomic_load_explicit(&q->nodes[get_ptr(head)].next, acquire);
                /** @OPClearDefine: true */
                if (atomic_load_explicit(&q->head, relaxed) == head) {
                        if (get_ptr(head) == get_ptr(tail)) {

                                /* Check for uninitialized 'next' */
                                MODEL_ASSERT(get_ptr(next) != POISON_IDX);

                                if (get_ptr(next) == 0) { // NULL
                                        return false; // NULL
                                }
                                /**********    Detected UL    **********/
                                atomic_compare_exchange_strong_explicit(&q->tail,
                                                &tail,
                                                MAKE_POINTER(get_ptr(next), get_count(tail) + 1),
                                                release, release);
                                thrd_yield();
                        } else {
                                //*retVal = load_32(&q->nodes[get_ptr(next)].value);
                                *retVal = q->nodes[get_ptr(next)].value;
                                /**********    Detected Admissibility (testcase3)    **********/
                                success = atomic_compare_exchange_strong_explicit(&q->head,
                                                &head,
                                                MAKE_POINTER(get_ptr(next), get_count(head) + 1),
                                                release, release);
                                if (!success)
                                        thrd_yield();
                        }
                }
        }
        *reclaimNode = get_ptr(head);
        reclaim(get_ptr(head));
        return true;
}
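
Below is a minimal driver sketch showing how the pieces above fit together; it is not part of queue.c, and the benchmark presumably ships its own harness. The entry-point name user_main, the void-returning thread signature, the thread count passed to init_queue, and the value passed as enqueue's third argument are all assumptions on my part, flagged again in the comments.

/* usage_sketch.c -- illustrative only, NOT part of the benchmark */
#include <stdlib.h>
#include <threads.h>
#include "queue.h"

static queue_t *queue;

/* ASSUMPTION: CDSChecker-style void-returning thread body; a standard C11
 * <threads.h> build would use int (*)(void *) and check thrd_success */
static void main_task(void *param)
{
        unsigned int val, node;
        int pid = *((int *)param);

        if (pid % 2 == 0) {
                /* ASSUMPTION: the meaning of enqueue's third argument is not
                 * visible in queue.c, so 0 is just a placeholder */
                enqueue(queue, 1 + pid, 0);
        } else if (dequeue(queue, &val, &node)) {
                simulateRecycledNodeUpdate(queue, node);
        }
}

/* ASSUMPTION: CDSChecker programs start at user_main rather than main */
int user_main(int argc, char **argv)
{
        thrd_t t1, t2;
        int ids[2] = {0, 1};

        queue = calloc(1, sizeof(*queue));
        /* ASSUMPTION: 2 must cover whatever get_thread_num() returns for the
         * worker threads, otherwise new_node()/reclaim() index out of bounds */
        init_queue(queue, 2);

        thrd_create(&t1, main_task, &ids[0]);
        thrd_create(&t2, main_task, &ids[1]);
        thrd_join(t1);
        thrd_join(t2);

        free(queue);
        return 0;
}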