4 #include "model-assert.h"
8 #define relaxed memory_order_relaxed
9 #define release memory_order_release
10 #define acquire memory_order_acquire
12 #define MAX_FREELIST 4 /* Each thread can own up to MAX_FREELIST free nodes */
13 #define INITIAL_FREE 2 /* Each thread starts with INITIAL_FREE free nodes */
15 #define POISON_IDX 0x666
17 static unsigned int (*free_lists)[MAX_FREELIST];
/* Search this thread's free list for a "new" node.
 * Free lists are strictly thread-private (indexed by get_thread_num()), so
 * entries are read with plain, non-atomic loads; the commented-out
 * load_32()/store_32() calls are the older instrumented equivalents.
 * NOTE(review): part of this function's body is elided in this view (the
 * claim/return path and closing brace are not visible); comments describe
 * only the visible statements. */
static unsigned int new_node()
	int t = get_thread_num();  /* row of this thread's private free list */
	for (i = 0; i < MAX_FREELIST; i++) {
		/* Thread-local access: should never race with other threads */
		//unsigned int node = load_32(&free_lists[t][i]);
		unsigned int node = free_lists[t][i];
		//store_32(&free_lists[t][i], 0);
	/* free_list is empty? (fall-through path — elided in this view) */
/* Simulate the fact that when a node got recycled, it will get assigned to
 * the same queue or for other usage: its 'next' field is clobbered so stale
 * readers holding the old index can be detected by the model checker. */
void simulateRecycledNodeUpdate(queue_t *q, unsigned int node) {
	/* Release store orders this overwrite after any prior writes to the
	 * node by this thread.
	 * NOTE(review): -1 is stored into the atomic 'next' (pointer type) —
	 * presumably an all-ones sentinel value; confirm against the pointer
	 * typedef in the queue header. */
	atomic_store_explicit(&q->nodes[node].next, -1, memory_order_release);
/* Place this node index back on this thread's free list.
 * NOTE(review): part of this function's body is elided in this view (the
 * NULL check, the empty-slot test, and the closing brace are not visible);
 * comments describe only the visible statements. */
static void reclaim(unsigned int node)
	int t = get_thread_num();  /* row of this thread's private free list */
	/* Don't reclaim NULL node (index 0 is reserved as NULL) */
	for (i = 0; i < MAX_FREELIST; i++) {
		/* Thread-local access: should never race with our own thread here */
		//unsigned int idx = load_32(&free_lists[t][i]);
		unsigned int idx = free_lists[t][i];
		/* Found empty spot in free list — claim it for this node */
		//store_32(&free_lists[t][i], node);
		free_lists[t][i] = node;
	/* free list is full? (overflow path — elided in this view) */
/* Initialize the queue and the per-thread free lists.
 * Node index 0 is reserved as NULL and node 1 as the initial dummy node, so
 * usable node indexes start at 2; each thread owns a disjoint index range.
 * atomic_init() is the correct pre-publication initializer here (no other
 * thread can observe the fields yet).
 * NOTE(review): loop-variable declarations and closing braces are elided in
 * this view; comments describe only the visible statements. */
void init_queue(queue_t *q, int num_threads)
	/* Initialize each thread's free list with INITIAL_FREE pointers */
	/* The actual nodes are initialized with poison indexes so that reading
	 * an uninitialized 'next' is detectable (POISON_IDX) */
	/* NOTE(review): malloc result is not checked before use */
	free_lists = ( unsigned int (*)[MAX_FREELIST] ) malloc(num_threads * sizeof(*free_lists));
	for (i = 0; i < num_threads; i++) {
		for (j = 0; j < INITIAL_FREE; j++) {
			free_lists[i][j] = 2 + i * MAX_FREELIST + j;
			atomic_init(&q->nodes[free_lists[i][j]].next, MAKE_POINTER(POISON_IDX, 0));
	/* initialize queue: head and tail both reference dummy node 1, whose
	 * next pointer is NULL (index 0); counts start at 0 */
	atomic_init(&q->head, MAKE_POINTER(1, 0));
	atomic_init(&q->tail, MAKE_POINTER(1, 0));
	atomic_init(&q->nodes[1].next, MAKE_POINTER(0, 0));
90 /** @DeclareState: IntList *q;
91 @Commutativity: enqueue <-> dequeue (true)
92 @Commutativity: dequeue <-> dequeue (!M1->RET || !M2->RET) */
/** @Transition: STATE(q)->push_back(val); */
/* Enqueue 'val' (Michael-Scott algorithm): initialize the new node with the
 * value and a NULL next pointer, then CAS it onto the tail node's 'next',
 * and finally swing q->tail forward. 'pointer' values pack an index and an
 * ABA counter (get_ptr/get_count/MAKE_POINTER).
 * NOTE(review): the node allocation, the retry loop, and several closing
 * braces are elided in this view; comments describe only the visible
 * statements. */
void enqueue(queue_t *q, unsigned int val, int n)
	//store_32(&q->nodes[node].value, val);
	q->nodes[node].value = val;
	tmp = atomic_load_explicit(&q->nodes[node].next, relaxed);
	set_ptr(&tmp, 0); // NULL: the new node terminates the list
	// This is a found bug in AutoMO, and testcase4 can reveal this known bug
	atomic_store_explicit(&q->nodes[node].next, tmp, release);
	/********** Detected UL **********/
	tail = atomic_load_explicit(&q->tail, acquire);
	/********** Detected Admissibility (testcase4) **********/
	next = atomic_load_explicit(&q->nodes[get_ptr(tail)].next, acquire);
	/* Consistency check: tail must not have moved since we read it */
	if (tail == atomic_load_explicit(&q->tail, relaxed)) {
		/* Check for uninitialized 'next' */
		//MODEL_ASSERT(get_ptr(next) != POISON_IDX);
		if (get_ptr(next) == 0) { // == NULL: tail is the true last node
			pointer value = MAKE_POINTER(node, get_count(next) + 1);
			/********** Detected Correctness (testcase1) **********/
			/* NOTE(review): C11 (7.17.7.4) forbids memory_order_release as
			 * the CAS *failure* ordering; the second 'release' argument
			 * below should be relaxed (or acquire). */
			success = atomic_compare_exchange_strong_explicit(&q->nodes[get_ptr(tail)].next,
					&next, value, release, release);
			/** @OPClearDefine: success */
		/********** Detected UL **********/
		/* Tail is lagging: help swing it to the node after tail */
		unsigned int ptr = get_ptr(atomic_load_explicit(&q->nodes[get_ptr(tail)].next, acquire));
		pointer value = MAKE_POINTER(ptr,
				get_count(tail) + 1);
		/********** Detected Correctness (testcase2) **********/
		atomic_compare_exchange_strong_explicit(&q->tail,
	/********** Detected Correctness (testcase1) **********/
	/* Final step after a successful link: swing q->tail to the new node */
	atomic_compare_exchange_strong_explicit(&q->tail,
			MAKE_POINTER(node, get_count(tail) + 1),
/** @Transition: if (RET) {
	if (STATE(q)->empty()) return false;
	STATE(q)->pop_front();
	@PreCondition: return RET ? !STATE(q)->empty() && STATE(q)->front() == *retVal : true; */
/* Dequeue into *retVal (Michael-Scott algorithm); returns false when the
 * queue is empty. On success the retired dummy node's index is written to
 * *reclaimNode and placed back on this thread's free list, and the node
 * after the old head becomes the new dummy.
 * NOTE(review): the retry loop, several branch lines, and closing braces
 * are elided in this view; comments describe only the visible statements. */
bool dequeue(queue_t *q, unsigned int *retVal, unsigned int *reclaimNode)
	/********** Detected Admissibility (testcase3) **********/
	head = atomic_load_explicit(&q->head, acquire);
	/********** Detected KNOWN BUG **********/
	tail = atomic_load_explicit(&q->tail, acquire);
	/********** Detected Correctness (testcase1) **********/
	next = atomic_load_explicit(&q->nodes[get_ptr(head)].next, acquire);
	/** @OPClearDefine: true */
	/* Consistency check: head must not have moved since we read it */
	if (atomic_load_explicit(&q->head, relaxed) == head) {
		if (get_ptr(head) == get_ptr(tail)) {
			/* Check for uninitialized 'next' */
			MODEL_ASSERT(get_ptr(next) != POISON_IDX);
			if (get_ptr(next) == 0) { // NULL: queue is genuinely empty
				return false; // NULL
			/********** Detected UL **********/
			/* head == tail but next != NULL: tail is lagging — help
			 * advance it before retrying */
			atomic_compare_exchange_strong_explicit(&q->tail,
					MAKE_POINTER(get_ptr(next), get_count(tail) + 1),
			/* Read the value BEFORE the CAS on head; afterwards another
			 * dequeuer could recycle the node */
			//*retVal = load_32(&q->nodes[get_ptr(next)].value);
			*retVal = q->nodes[get_ptr(next)].value;
			/********** Detected Admissibility (testcase3) **********/
			success = atomic_compare_exchange_strong_explicit(&q->head,
					MAKE_POINTER(get_ptr(next), get_count(head) + 1),
	/* Hand the retired dummy node to the caller and recycle it locally */
	*reclaimNode = get_ptr(head);
	reclaim(get_ptr(head));