; RUN: opt < %s -tsan -S | FileCheck %s
; Check that atomic memory operations are converted to calls into ThreadSanitizer runtime.
target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128"
; --- 8-bit atomics: each function performs one atomic operation; the CHECK
; lines verify it is rewritten into the matching __tsan_atomic8_* runtime call.
; Memory-order encoding passed to the runtime: 0=relaxed (unordered/monotonic),
; 2=acquire, 3=release, 4=acq_rel, 5=seq_cst.

define i8 @atomic8_load_unordered(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a unordered, align 1
  ret i8 %0
}
; CHECK: atomic8_load_unordered
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 0)

define i8 @atomic8_load_monotonic(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a monotonic, align 1
  ret i8 %0
}
; CHECK: atomic8_load_monotonic
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 0)

define i8 @atomic8_load_acquire(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a acquire, align 1
  ret i8 %0
}
; CHECK: atomic8_load_acquire
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 2)

define i8 @atomic8_load_seq_cst(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a seq_cst, align 1
  ret i8 %0
}
; CHECK: atomic8_load_seq_cst
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 5)

define void @atomic8_store_unordered(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a unordered, align 1
  ret void
}
; CHECK: atomic8_store_unordered
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 0)

define void @atomic8_store_monotonic(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a monotonic, align 1
  ret void
}
; CHECK: atomic8_store_monotonic
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 0)

define void @atomic8_store_release(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a release, align 1
  ret void
}
; CHECK: atomic8_store_release
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 3)

define void @atomic8_store_seq_cst(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a seq_cst, align 1
  ret void
}
; CHECK: atomic8_store_seq_cst
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 5)

define void @atomic8_xchg_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_xchg_monotonic
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 0)

define void @atomic8_add_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_add_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 0)

define void @atomic8_sub_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_sub_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 0)

define void @atomic8_and_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_and_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 0)

define void @atomic8_or_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_or_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 0)

define void @atomic8_xor_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_xor_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 0)

define void @atomic8_nand_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw nand i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_nand_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 0)

define void @atomic8_xchg_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_xchg_acquire
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 2)

define void @atomic8_add_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_add_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 2)

define void @atomic8_sub_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_sub_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 2)

define void @atomic8_and_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_and_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 2)

define void @atomic8_or_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_or_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 2)

define void @atomic8_xor_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_xor_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 2)

define void @atomic8_nand_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw nand i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_nand_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 2)

define void @atomic8_xchg_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_xchg_release
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 3)

define void @atomic8_add_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_add_release
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 3)

define void @atomic8_sub_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_sub_release
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 3)

define void @atomic8_and_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_and_release
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 3)

define void @atomic8_or_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_or_release
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 3)

define void @atomic8_xor_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_xor_release
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 3)

define void @atomic8_nand_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw nand i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_nand_release
; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 3)

define void @atomic8_xchg_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_xchg_acq_rel
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 4)

define void @atomic8_add_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_add_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 4)

define void @atomic8_sub_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_sub_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 4)

define void @atomic8_and_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_and_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 4)

define void @atomic8_or_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_or_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 4)

define void @atomic8_xor_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_xor_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 4)

define void @atomic8_nand_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw nand i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_nand_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 4)

define void @atomic8_xchg_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_xchg_seq_cst
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 5)

define void @atomic8_add_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_add_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 5)

define void @atomic8_sub_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_sub_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 5)

define void @atomic8_and_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_and_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 5)

define void @atomic8_or_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_or_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 5)

define void @atomic8_xor_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_xor_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 5)

define void @atomic8_nand_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw nand i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_nand_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 5)

define void @atomic8_cas_monotonic(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 monotonic
  ret void
}
; CHECK: atomic8_cas_monotonic
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 0, i32 0)

define void @atomic8_cas_acquire(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 acquire
  ret void
}
; CHECK: atomic8_cas_acquire
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 2, i32 2)

define void @atomic8_cas_release(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 release
  ret void
}
; CHECK: atomic8_cas_release
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 3, i32 0)

define void @atomic8_cas_acq_rel(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 acq_rel
  ret void
}
; CHECK: atomic8_cas_acq_rel
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 4, i32 2)

define void @atomic8_cas_seq_cst(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 seq_cst
  ret void
}
; CHECK: atomic8_cas_seq_cst
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 5, i32 5)
; --- 16-bit atomics: same matrix as the 8-bit tests, lowered to the
; __tsan_atomic16_* runtime entry points (align 2, i16 operands).

define i16 @atomic16_load_unordered(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a unordered, align 2
  ret i16 %0
}
; CHECK: atomic16_load_unordered
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 0)

define i16 @atomic16_load_monotonic(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a monotonic, align 2
  ret i16 %0
}
; CHECK: atomic16_load_monotonic
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 0)

define i16 @atomic16_load_acquire(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a acquire, align 2
  ret i16 %0
}
; CHECK: atomic16_load_acquire
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 2)

define i16 @atomic16_load_seq_cst(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a seq_cst, align 2
  ret i16 %0
}
; CHECK: atomic16_load_seq_cst
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 5)

define void @atomic16_store_unordered(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a unordered, align 2
  ret void
}
; CHECK: atomic16_store_unordered
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 0)

define void @atomic16_store_monotonic(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a monotonic, align 2
  ret void
}
; CHECK: atomic16_store_monotonic
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 0)

define void @atomic16_store_release(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a release, align 2
  ret void
}
; CHECK: atomic16_store_release
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 3)

define void @atomic16_store_seq_cst(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a seq_cst, align 2
  ret void
}
; CHECK: atomic16_store_seq_cst
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 5)

define void @atomic16_xchg_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_xchg_monotonic
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 0)

define void @atomic16_add_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_add_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 0)

define void @atomic16_sub_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_sub_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 0)

define void @atomic16_and_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_and_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 0)

define void @atomic16_or_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_or_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 0)

define void @atomic16_xor_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_xor_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 0)

define void @atomic16_nand_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw nand i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_nand_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 0)

define void @atomic16_xchg_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_xchg_acquire
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 2)

define void @atomic16_add_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_add_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 2)

define void @atomic16_sub_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_sub_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 2)

define void @atomic16_and_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_and_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 2)

define void @atomic16_or_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_or_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 2)

define void @atomic16_xor_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_xor_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 2)

define void @atomic16_nand_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw nand i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_nand_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 2)

define void @atomic16_xchg_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_xchg_release
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 3)

define void @atomic16_add_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_add_release
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 3)

define void @atomic16_sub_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_sub_release
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 3)

define void @atomic16_and_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_and_release
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 3)

define void @atomic16_or_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_or_release
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 3)

define void @atomic16_xor_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_xor_release
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 3)

define void @atomic16_nand_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw nand i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_nand_release
; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 3)

define void @atomic16_xchg_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_xchg_acq_rel
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 4)

define void @atomic16_add_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_add_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 4)

define void @atomic16_sub_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_sub_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 4)

define void @atomic16_and_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_and_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 4)

define void @atomic16_or_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_or_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 4)

define void @atomic16_xor_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_xor_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 4)

define void @atomic16_nand_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw nand i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_nand_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 4)

define void @atomic16_xchg_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_xchg_seq_cst
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 5)

define void @atomic16_add_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_add_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 5)

define void @atomic16_sub_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_sub_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 5)

define void @atomic16_and_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_and_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 5)

define void @atomic16_or_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_or_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 5)

define void @atomic16_xor_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_xor_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 5)

define void @atomic16_nand_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw nand i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_nand_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 5)

define void @atomic16_cas_monotonic(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 monotonic
  ret void
}
; CHECK: atomic16_cas_monotonic
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 0, i32 0)

define void @atomic16_cas_acquire(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 acquire
  ret void
}
; CHECK: atomic16_cas_acquire
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 2, i32 2)

define void @atomic16_cas_release(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 release
  ret void
}
; CHECK: atomic16_cas_release
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 3, i32 0)

define void @atomic16_cas_acq_rel(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 acq_rel
  ret void
}
; CHECK: atomic16_cas_acq_rel
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 4, i32 2)

define void @atomic16_cas_seq_cst(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 seq_cst
  ret void
}
; CHECK: atomic16_cas_seq_cst
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 5, i32 5)
; --- 32-bit atomics: same matrix as the 8-bit tests, lowered to the
; __tsan_atomic32_* runtime entry points (align 4, i32 operands).

define i32 @atomic32_load_unordered(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a unordered, align 4
  ret i32 %0
}
; CHECK: atomic32_load_unordered
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 0)

define i32 @atomic32_load_monotonic(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a monotonic, align 4
  ret i32 %0
}
; CHECK: atomic32_load_monotonic
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 0)

define i32 @atomic32_load_acquire(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a acquire, align 4
  ret i32 %0
}
; CHECK: atomic32_load_acquire
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 2)

define i32 @atomic32_load_seq_cst(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a seq_cst, align 4
  ret i32 %0
}
; CHECK: atomic32_load_seq_cst
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 5)

define void @atomic32_store_unordered(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a unordered, align 4
  ret void
}
; CHECK: atomic32_store_unordered
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 0)

define void @atomic32_store_monotonic(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a monotonic, align 4
  ret void
}
; CHECK: atomic32_store_monotonic
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 0)

define void @atomic32_store_release(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a release, align 4
  ret void
}
; CHECK: atomic32_store_release
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 3)

define void @atomic32_store_seq_cst(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a seq_cst, align 4
  ret void
}
; CHECK: atomic32_store_seq_cst
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 5)

define void @atomic32_xchg_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_xchg_monotonic
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 0)

define void @atomic32_add_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_add_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 0)

define void @atomic32_sub_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_sub_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 0)

define void @atomic32_and_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_and_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 0)

define void @atomic32_or_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_or_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 0)

define void @atomic32_xor_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_xor_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 0)

define void @atomic32_nand_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw nand i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_nand_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 0)

define void @atomic32_xchg_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_xchg_acquire
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 2)

define void @atomic32_add_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_add_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 2)

define void @atomic32_sub_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_sub_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 2)

define void @atomic32_and_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_and_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 2)

define void @atomic32_or_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_or_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 2)

define void @atomic32_xor_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_xor_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 2)

define void @atomic32_nand_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw nand i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_nand_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 2)

define void @atomic32_xchg_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_xchg_release
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 3)

define void @atomic32_add_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_add_release
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 3)

define void @atomic32_sub_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_sub_release
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 3)

define void @atomic32_and_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_and_release
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 3)

define void @atomic32_or_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_or_release
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 3)

define void @atomic32_xor_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_xor_release
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 3)

define void @atomic32_nand_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw nand i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_nand_release
; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 3)

define void @atomic32_xchg_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_xchg_acq_rel
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 4)

define void @atomic32_add_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_add_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 4)

define void @atomic32_sub_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_sub_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 4)

define void @atomic32_and_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_and_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 4)

define void @atomic32_or_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_or_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 4)

define void @atomic32_xor_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_xor_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 4)

define void @atomic32_nand_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw nand i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_nand_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 4)

define void @atomic32_xchg_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_xchg_seq_cst
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 5)

define void @atomic32_add_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_add_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 5)

define void @atomic32_sub_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_sub_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 5)

define void @atomic32_and_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_and_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 5)

define void @atomic32_or_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_or_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 5)

define void @atomic32_xor_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_xor_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 5)

define void @atomic32_nand_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw nand i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_nand_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 5)

define void @atomic32_cas_monotonic(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 monotonic
  ret void
}
; CHECK: atomic32_cas_monotonic
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 0, i32 0)

define void @atomic32_cas_acquire(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 acquire
  ret void
}
; CHECK: atomic32_cas_acquire
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 2, i32 2)

define void @atomic32_cas_release(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 release
  ret void
}
; CHECK: atomic32_cas_release
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 3, i32 0)

define void @atomic32_cas_acq_rel(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 acq_rel
  ret void
}
; CHECK: atomic32_cas_acq_rel
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 4, i32 2)

define void @atomic32_cas_seq_cst(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 seq_cst
  ret void
}
; CHECK: atomic32_cas_seq_cst
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 5, i32 5)
; --- 64-bit atomics (section truncated in this chunk): lowered to the
; __tsan_atomic64_* runtime entry points (align 8, i64 operands).

define i64 @atomic64_load_unordered(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a unordered, align 8
  ret i64 %0
}
; CHECK: atomic64_load_unordered
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 0)

define i64 @atomic64_load_monotonic(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a monotonic, align 8
  ret i64 %0
}
; CHECK: atomic64_load_monotonic
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 0)

define i64 @atomic64_load_acquire(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a acquire, align 8
  ret i64 %0
}
; CHECK: atomic64_load_acquire
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 2)

define i64 @atomic64_load_seq_cst(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a seq_cst, align 8
  ret i64 %0
}
; CHECK: atomic64_load_seq_cst
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 5)

define void @atomic64_store_unordered(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a unordered, align 8
  ret void
}
; CHECK: atomic64_store_unordered
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 0)

define void @atomic64_store_monotonic(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a monotonic, align 8
  ret void
}
; CHECK: atomic64_store_monotonic
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 0)

define void @atomic64_store_release(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a release, align 8
  ret void
}
; CHECK: atomic64_store_release
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 3)

define void @atomic64_store_seq_cst(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a seq_cst, align 8
  ret void
}
; CHECK: atomic64_store_seq_cst
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 5)

define void @atomic64_xchg_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_xchg_monotonic
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 0)

define void @atomic64_add_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_add_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 0)

define void @atomic64_sub_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_sub_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 0)

define void @atomic64_and_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_and_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 0)
1253 define void @atomic64_or_monotonic(i64* %a) nounwind uwtable {
1255 atomicrmw or i64* %a, i64 0 monotonic
1258 ; CHECK: atomic64_or_monotonic
1259 ; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 0)
1261 define void @atomic64_xor_monotonic(i64* %a) nounwind uwtable {
1263 atomicrmw xor i64* %a, i64 0 monotonic
1266 ; CHECK: atomic64_xor_monotonic
1267 ; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 0)
1269 define void @atomic64_nand_monotonic(i64* %a) nounwind uwtable {
1271 atomicrmw nand i64* %a, i64 0 monotonic
1274 ; CHECK: atomic64_nand_monotonic
1275 ; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 0)
1277 define void @atomic64_xchg_acquire(i64* %a) nounwind uwtable {
1279 atomicrmw xchg i64* %a, i64 0 acquire
1282 ; CHECK: atomic64_xchg_acquire
1283 ; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 2)
1285 define void @atomic64_add_acquire(i64* %a) nounwind uwtable {
1287 atomicrmw add i64* %a, i64 0 acquire
1290 ; CHECK: atomic64_add_acquire
1291 ; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 2)
1293 define void @atomic64_sub_acquire(i64* %a) nounwind uwtable {
1295 atomicrmw sub i64* %a, i64 0 acquire
1298 ; CHECK: atomic64_sub_acquire
1299 ; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 2)
1301 define void @atomic64_and_acquire(i64* %a) nounwind uwtable {
1303 atomicrmw and i64* %a, i64 0 acquire
1306 ; CHECK: atomic64_and_acquire
1307 ; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 2)
1309 define void @atomic64_or_acquire(i64* %a) nounwind uwtable {
1311 atomicrmw or i64* %a, i64 0 acquire
1314 ; CHECK: atomic64_or_acquire
1315 ; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 2)
1317 define void @atomic64_xor_acquire(i64* %a) nounwind uwtable {
1319 atomicrmw xor i64* %a, i64 0 acquire
1322 ; CHECK: atomic64_xor_acquire
1323 ; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 2)
1325 define void @atomic64_nand_acquire(i64* %a) nounwind uwtable {
1327 atomicrmw nand i64* %a, i64 0 acquire
1330 ; CHECK: atomic64_nand_acquire
1331 ; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 2)
1333 define void @atomic64_xchg_release(i64* %a) nounwind uwtable {
1335 atomicrmw xchg i64* %a, i64 0 release
1338 ; CHECK: atomic64_xchg_release
1339 ; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 3)
1341 define void @atomic64_add_release(i64* %a) nounwind uwtable {
1343 atomicrmw add i64* %a, i64 0 release
1346 ; CHECK: atomic64_add_release
1347 ; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 3)
1349 define void @atomic64_sub_release(i64* %a) nounwind uwtable {
1351 atomicrmw sub i64* %a, i64 0 release
1354 ; CHECK: atomic64_sub_release
1355 ; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 3)
1357 define void @atomic64_and_release(i64* %a) nounwind uwtable {
1359 atomicrmw and i64* %a, i64 0 release
1362 ; CHECK: atomic64_and_release
1363 ; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 3)
1365 define void @atomic64_or_release(i64* %a) nounwind uwtable {
1367 atomicrmw or i64* %a, i64 0 release
1370 ; CHECK: atomic64_or_release
1371 ; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 3)
1373 define void @atomic64_xor_release(i64* %a) nounwind uwtable {
1375 atomicrmw xor i64* %a, i64 0 release
1378 ; CHECK: atomic64_xor_release
1379 ; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 3)
1381 define void @atomic64_nand_release(i64* %a) nounwind uwtable {
1383 atomicrmw nand i64* %a, i64 0 release
1386 ; CHECK: atomic64_nand_release
1387 ; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 3)
1389 define void @atomic64_xchg_acq_rel(i64* %a) nounwind uwtable {
1391 atomicrmw xchg i64* %a, i64 0 acq_rel
1394 ; CHECK: atomic64_xchg_acq_rel
1395 ; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 4)
1397 define void @atomic64_add_acq_rel(i64* %a) nounwind uwtable {
1399 atomicrmw add i64* %a, i64 0 acq_rel
1402 ; CHECK: atomic64_add_acq_rel
1403 ; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 4)
1405 define void @atomic64_sub_acq_rel(i64* %a) nounwind uwtable {
1407 atomicrmw sub i64* %a, i64 0 acq_rel
1410 ; CHECK: atomic64_sub_acq_rel
1411 ; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 4)
1413 define void @atomic64_and_acq_rel(i64* %a) nounwind uwtable {
1415 atomicrmw and i64* %a, i64 0 acq_rel
1418 ; CHECK: atomic64_and_acq_rel
1419 ; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 4)
1421 define void @atomic64_or_acq_rel(i64* %a) nounwind uwtable {
1423 atomicrmw or i64* %a, i64 0 acq_rel
1426 ; CHECK: atomic64_or_acq_rel
1427 ; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 4)
1429 define void @atomic64_xor_acq_rel(i64* %a) nounwind uwtable {
1431 atomicrmw xor i64* %a, i64 0 acq_rel
1434 ; CHECK: atomic64_xor_acq_rel
1435 ; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 4)
1437 define void @atomic64_nand_acq_rel(i64* %a) nounwind uwtable {
1439 atomicrmw nand i64* %a, i64 0 acq_rel
1442 ; CHECK: atomic64_nand_acq_rel
1443 ; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 4)
1445 define void @atomic64_xchg_seq_cst(i64* %a) nounwind uwtable {
1447 atomicrmw xchg i64* %a, i64 0 seq_cst
1450 ; CHECK: atomic64_xchg_seq_cst
1451 ; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 5)
1453 define void @atomic64_add_seq_cst(i64* %a) nounwind uwtable {
1455 atomicrmw add i64* %a, i64 0 seq_cst
1458 ; CHECK: atomic64_add_seq_cst
1459 ; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 5)
1461 define void @atomic64_sub_seq_cst(i64* %a) nounwind uwtable {
1463 atomicrmw sub i64* %a, i64 0 seq_cst
1466 ; CHECK: atomic64_sub_seq_cst
1467 ; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 5)
1469 define void @atomic64_and_seq_cst(i64* %a) nounwind uwtable {
1471 atomicrmw and i64* %a, i64 0 seq_cst
1474 ; CHECK: atomic64_and_seq_cst
1475 ; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 5)
1477 define void @atomic64_or_seq_cst(i64* %a) nounwind uwtable {
1479 atomicrmw or i64* %a, i64 0 seq_cst
1482 ; CHECK: atomic64_or_seq_cst
1483 ; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 5)
1485 define void @atomic64_xor_seq_cst(i64* %a) nounwind uwtable {
1487 atomicrmw xor i64* %a, i64 0 seq_cst
1490 ; CHECK: atomic64_xor_seq_cst
1491 ; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 5)
1493 define void @atomic64_nand_seq_cst(i64* %a) nounwind uwtable {
1495 atomicrmw nand i64* %a, i64 0 seq_cst
1498 ; CHECK: atomic64_nand_seq_cst
1499 ; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 5)
1501 define void @atomic64_cas_monotonic(i64* %a) nounwind uwtable {
1503 cmpxchg i64* %a, i64 0, i64 1 monotonic
1506 ; CHECK: atomic64_cas_monotonic
1507 ; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 0, i32 0)
1509 define void @atomic64_cas_acquire(i64* %a) nounwind uwtable {
1511 cmpxchg i64* %a, i64 0, i64 1 acquire
1514 ; CHECK: atomic64_cas_acquire
1515 ; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 2, i32 2)
1517 define void @atomic64_cas_release(i64* %a) nounwind uwtable {
1519 cmpxchg i64* %a, i64 0, i64 1 release
1522 ; CHECK: atomic64_cas_release
1523 ; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 3, i32 0)
1525 define void @atomic64_cas_acq_rel(i64* %a) nounwind uwtable {
1527 cmpxchg i64* %a, i64 0, i64 1 acq_rel
1530 ; CHECK: atomic64_cas_acq_rel
1531 ; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 4, i32 2)
1533 define void @atomic64_cas_seq_cst(i64* %a) nounwind uwtable {
1535 cmpxchg i64* %a, i64 0, i64 1 seq_cst
1538 ; CHECK: atomic64_cas_seq_cst
1539 ; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 5, i32 5)
1541 define i128 @atomic128_load_unordered(i128* %a) nounwind uwtable {
1543 %0 = load atomic i128* %a unordered, align 16
1546 ; CHECK: atomic128_load_unordered
1547 ; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 0)
1549 define i128 @atomic128_load_monotonic(i128* %a) nounwind uwtable {
1551 %0 = load atomic i128* %a monotonic, align 16
1554 ; CHECK: atomic128_load_monotonic
1555 ; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 0)
1557 define i128 @atomic128_load_acquire(i128* %a) nounwind uwtable {
1559 %0 = load atomic i128* %a acquire, align 16
1562 ; CHECK: atomic128_load_acquire
1563 ; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 2)
1565 define i128 @atomic128_load_seq_cst(i128* %a) nounwind uwtable {
1567 %0 = load atomic i128* %a seq_cst, align 16
1570 ; CHECK: atomic128_load_seq_cst
1571 ; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 5)
1573 define void @atomic128_store_unordered(i128* %a) nounwind uwtable {
1575 store atomic i128 0, i128* %a unordered, align 16
1578 ; CHECK: atomic128_store_unordered
1579 ; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 0)
1581 define void @atomic128_store_monotonic(i128* %a) nounwind uwtable {
1583 store atomic i128 0, i128* %a monotonic, align 16
1586 ; CHECK: atomic128_store_monotonic
1587 ; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 0)
1589 define void @atomic128_store_release(i128* %a) nounwind uwtable {
1591 store atomic i128 0, i128* %a release, align 16
1594 ; CHECK: atomic128_store_release
1595 ; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 3)
1597 define void @atomic128_store_seq_cst(i128* %a) nounwind uwtable {
1599 store atomic i128 0, i128* %a seq_cst, align 16
1602 ; CHECK: atomic128_store_seq_cst
1603 ; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 5)
1605 define void @atomic128_xchg_monotonic(i128* %a) nounwind uwtable {
1607 atomicrmw xchg i128* %a, i128 0 monotonic
1610 ; CHECK: atomic128_xchg_monotonic
1611 ; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 0)
1613 define void @atomic128_add_monotonic(i128* %a) nounwind uwtable {
1615 atomicrmw add i128* %a, i128 0 monotonic
1618 ; CHECK: atomic128_add_monotonic
1619 ; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 0)
1621 define void @atomic128_sub_monotonic(i128* %a) nounwind uwtable {
1623 atomicrmw sub i128* %a, i128 0 monotonic
1626 ; CHECK: atomic128_sub_monotonic
1627 ; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 0)
1629 define void @atomic128_and_monotonic(i128* %a) nounwind uwtable {
1631 atomicrmw and i128* %a, i128 0 monotonic
1634 ; CHECK: atomic128_and_monotonic
1635 ; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 0)
1637 define void @atomic128_or_monotonic(i128* %a) nounwind uwtable {
1639 atomicrmw or i128* %a, i128 0 monotonic
1642 ; CHECK: atomic128_or_monotonic
1643 ; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 0)
1645 define void @atomic128_xor_monotonic(i128* %a) nounwind uwtable {
1647 atomicrmw xor i128* %a, i128 0 monotonic
1650 ; CHECK: atomic128_xor_monotonic
1651 ; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 0)
1653 define void @atomic128_nand_monotonic(i128* %a) nounwind uwtable {
1655 atomicrmw nand i128* %a, i128 0 monotonic
1658 ; CHECK: atomic128_nand_monotonic
1659 ; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 0)
1661 define void @atomic128_xchg_acquire(i128* %a) nounwind uwtable {
1663 atomicrmw xchg i128* %a, i128 0 acquire
1666 ; CHECK: atomic128_xchg_acquire
1667 ; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 2)
1669 define void @atomic128_add_acquire(i128* %a) nounwind uwtable {
1671 atomicrmw add i128* %a, i128 0 acquire
1674 ; CHECK: atomic128_add_acquire
1675 ; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 2)
1677 define void @atomic128_sub_acquire(i128* %a) nounwind uwtable {
1679 atomicrmw sub i128* %a, i128 0 acquire
1682 ; CHECK: atomic128_sub_acquire
1683 ; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 2)
1685 define void @atomic128_and_acquire(i128* %a) nounwind uwtable {
1687 atomicrmw and i128* %a, i128 0 acquire
1690 ; CHECK: atomic128_and_acquire
1691 ; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 2)
1693 define void @atomic128_or_acquire(i128* %a) nounwind uwtable {
1695 atomicrmw or i128* %a, i128 0 acquire
1698 ; CHECK: atomic128_or_acquire
1699 ; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 2)
1701 define void @atomic128_xor_acquire(i128* %a) nounwind uwtable {
1703 atomicrmw xor i128* %a, i128 0 acquire
1706 ; CHECK: atomic128_xor_acquire
1707 ; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 2)
1709 define void @atomic128_nand_acquire(i128* %a) nounwind uwtable {
1711 atomicrmw nand i128* %a, i128 0 acquire
1714 ; CHECK: atomic128_nand_acquire
1715 ; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 2)
1717 define void @atomic128_xchg_release(i128* %a) nounwind uwtable {
1719 atomicrmw xchg i128* %a, i128 0 release
1722 ; CHECK: atomic128_xchg_release
1723 ; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 3)
1725 define void @atomic128_add_release(i128* %a) nounwind uwtable {
1727 atomicrmw add i128* %a, i128 0 release
1730 ; CHECK: atomic128_add_release
1731 ; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 3)
1733 define void @atomic128_sub_release(i128* %a) nounwind uwtable {
1735 atomicrmw sub i128* %a, i128 0 release
1738 ; CHECK: atomic128_sub_release
1739 ; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 3)
1741 define void @atomic128_and_release(i128* %a) nounwind uwtable {
1743 atomicrmw and i128* %a, i128 0 release
1746 ; CHECK: atomic128_and_release
1747 ; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 3)
1749 define void @atomic128_or_release(i128* %a) nounwind uwtable {
1751 atomicrmw or i128* %a, i128 0 release
1754 ; CHECK: atomic128_or_release
1755 ; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 3)
1757 define void @atomic128_xor_release(i128* %a) nounwind uwtable {
1759 atomicrmw xor i128* %a, i128 0 release
1762 ; CHECK: atomic128_xor_release
1763 ; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 3)
1765 define void @atomic128_nand_release(i128* %a) nounwind uwtable {
1767 atomicrmw nand i128* %a, i128 0 release
1770 ; CHECK: atomic128_nand_release
1771 ; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 3)
1773 define void @atomic128_xchg_acq_rel(i128* %a) nounwind uwtable {
1775 atomicrmw xchg i128* %a, i128 0 acq_rel
1778 ; CHECK: atomic128_xchg_acq_rel
1779 ; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 4)
1781 define void @atomic128_add_acq_rel(i128* %a) nounwind uwtable {
1783 atomicrmw add i128* %a, i128 0 acq_rel
1786 ; CHECK: atomic128_add_acq_rel
1787 ; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 4)
1789 define void @atomic128_sub_acq_rel(i128* %a) nounwind uwtable {
1791 atomicrmw sub i128* %a, i128 0 acq_rel
1794 ; CHECK: atomic128_sub_acq_rel
1795 ; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 4)
1797 define void @atomic128_and_acq_rel(i128* %a) nounwind uwtable {
1799 atomicrmw and i128* %a, i128 0 acq_rel
1802 ; CHECK: atomic128_and_acq_rel
1803 ; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 4)
1805 define void @atomic128_or_acq_rel(i128* %a) nounwind uwtable {
1807 atomicrmw or i128* %a, i128 0 acq_rel
1810 ; CHECK: atomic128_or_acq_rel
1811 ; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 4)
1813 define void @atomic128_xor_acq_rel(i128* %a) nounwind uwtable {
1815 atomicrmw xor i128* %a, i128 0 acq_rel
1818 ; CHECK: atomic128_xor_acq_rel
1819 ; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 4)
1821 define void @atomic128_nand_acq_rel(i128* %a) nounwind uwtable {
1823 atomicrmw nand i128* %a, i128 0 acq_rel
1826 ; CHECK: atomic128_nand_acq_rel
1827 ; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 4)
1829 define void @atomic128_xchg_seq_cst(i128* %a) nounwind uwtable {
1831 atomicrmw xchg i128* %a, i128 0 seq_cst
1834 ; CHECK: atomic128_xchg_seq_cst
1835 ; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 5)
1837 define void @atomic128_add_seq_cst(i128* %a) nounwind uwtable {
1839 atomicrmw add i128* %a, i128 0 seq_cst
1842 ; CHECK: atomic128_add_seq_cst
1843 ; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 5)
1845 define void @atomic128_sub_seq_cst(i128* %a) nounwind uwtable {
1847 atomicrmw sub i128* %a, i128 0 seq_cst
1850 ; CHECK: atomic128_sub_seq_cst
1851 ; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 5)
1853 define void @atomic128_and_seq_cst(i128* %a) nounwind uwtable {
1855 atomicrmw and i128* %a, i128 0 seq_cst
1858 ; CHECK: atomic128_and_seq_cst
1859 ; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 5)
1861 define void @atomic128_or_seq_cst(i128* %a) nounwind uwtable {
1863 atomicrmw or i128* %a, i128 0 seq_cst
1866 ; CHECK: atomic128_or_seq_cst
1867 ; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 5)
1869 define void @atomic128_xor_seq_cst(i128* %a) nounwind uwtable {
1871 atomicrmw xor i128* %a, i128 0 seq_cst
1874 ; CHECK: atomic128_xor_seq_cst
1875 ; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 5)
1877 define void @atomic128_nand_seq_cst(i128* %a) nounwind uwtable {
1879 atomicrmw nand i128* %a, i128 0 seq_cst
1882 ; CHECK: atomic128_nand_seq_cst
1883 ; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 5)
1885 define void @atomic128_cas_monotonic(i128* %a) nounwind uwtable {
1887 cmpxchg i128* %a, i128 0, i128 1 monotonic
1890 ; CHECK: atomic128_cas_monotonic
1891 ; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 0, i32 0)
1893 define void @atomic128_cas_acquire(i128* %a) nounwind uwtable {
1895 cmpxchg i128* %a, i128 0, i128 1 acquire
1898 ; CHECK: atomic128_cas_acquire
1899 ; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 2, i32 2)
1901 define void @atomic128_cas_release(i128* %a) nounwind uwtable {
1903 cmpxchg i128* %a, i128 0, i128 1 release
1906 ; CHECK: atomic128_cas_release
1907 ; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 3, i32 0)
1909 define void @atomic128_cas_acq_rel(i128* %a) nounwind uwtable {
1911 cmpxchg i128* %a, i128 0, i128 1 acq_rel
1914 ; CHECK: atomic128_cas_acq_rel
1915 ; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 4, i32 2)
1917 define void @atomic128_cas_seq_cst(i128* %a) nounwind uwtable {
1919 cmpxchg i128* %a, i128 0, i128 1 seq_cst
1922 ; CHECK: atomic128_cas_seq_cst
1923 ; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 5, i32 5)
1925 define void @atomic_signal_fence_acquire() nounwind uwtable {
1927 fence singlethread acquire
1930 ; CHECK: atomic_signal_fence_acquire
1931 ; CHECK: call void @__tsan_atomic_signal_fence(i32 2)
1933 define void @atomic_thread_fence_acquire() nounwind uwtable {
1938 ; CHECK: atomic_thread_fence_acquire
1939 ; CHECK: call void @__tsan_atomic_thread_fence(i32 2)
1941 define void @atomic_signal_fence_release() nounwind uwtable {
1943 fence singlethread release
1946 ; CHECK: atomic_signal_fence_release
1947 ; CHECK: call void @__tsan_atomic_signal_fence(i32 3)
1949 define void @atomic_thread_fence_release() nounwind uwtable {
1954 ; CHECK: atomic_thread_fence_release
1955 ; CHECK: call void @__tsan_atomic_thread_fence(i32 3)
1957 define void @atomic_signal_fence_acq_rel() nounwind uwtable {
1959 fence singlethread acq_rel
1962 ; CHECK: atomic_signal_fence_acq_rel
1963 ; CHECK: call void @__tsan_atomic_signal_fence(i32 4)
1965 define void @atomic_thread_fence_acq_rel() nounwind uwtable {
1970 ; CHECK: atomic_thread_fence_acq_rel
1971 ; CHECK: call void @__tsan_atomic_thread_fence(i32 4)
1973 define void @atomic_signal_fence_seq_cst() nounwind uwtable {
1975 fence singlethread seq_cst
1978 ; CHECK: atomic_signal_fence_seq_cst
1979 ; CHECK: call void @__tsan_atomic_signal_fence(i32 5)
1981 define void @atomic_thread_fence_seq_cst() nounwind uwtable {
1986 ; CHECK: atomic_thread_fence_seq_cst
1987 ; CHECK: call void @__tsan_atomic_thread_fence(i32 5)