1 //===- llvm/System/Atomic.h - Atomic Operations -----------------*- C++ -*-===//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This file declares the llvm::sys atomic operations.
12 // Portions of this file use code from libatomic_ops, for which the following
15 // Copyright (c) 2003 by Hewlett-Packard Company. All rights reserved.
17 // Permission is hereby granted, free of charge, to any person obtaining a copy
18 // of this software and associated documentation files (the "Software"), to deal
19 // in the Software without restriction, including without limitation the rights
20 // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
21 // copies of the Software, and to permit persons to whom the Software is
22 // furnished to do so, subject to the following conditions:
24 // The above copyright notice and this permission notice shall be included in
25 // all copies or substantial portions of the Software.
27 // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
28 // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
29 // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
30 // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
31 // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
32 // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
35 //===----------------------------------------------------------------------===//
37 #ifndef LLVM_SYSTEM_ATOMIC_H
38 #define LLVM_SYSTEM_ATOMIC_H
42 #if defined(_HPUX_SOURCE) && defined(__ia64)
43 #include <machine/sys/inline.h>
44 #elif defined(_MSC_VER)
46 #endif // defined(_HPUX_SOURCE) && defined(__ia64)
/// CompilerFence - A compiler-only barrier: prevents the compiler from
/// reordering loads and stores across this point.  It emits no fence
/// instruction, so it does NOT order memory accesses between processors.
inline void CompilerFence() {
#if defined(__GNUC__) && !defined(__INTEL_COMPILER)
  // An empty asm with a "memory" clobber is GCC's canonical compiler barrier.
  __asm__ __volatile__("" : : : "memory");
#elif defined(_MSC_VER)
  // An empty inline-asm block inhibits compiler reordering in MSVC (x86).
  __asm { };
#elif defined(__INTEL_COMPILER)
  __memory_barrier(); /* Too strong? IA64-only? */
#else
  /* We conjecture that the following usually gives us the right */
  /* semantics or an error. */
  asm("");
#endif // defined(__GNUC__) && !defined(__INTEL_COMPILER)
}
66 #if !defined(ENABLE_THREADS) || ENABLE_THREADS == 0
// Declared earlier in this header; redeclared so this section reads
// independently of the fence implementations above.
inline void CompilerFence();

/// MemoryFence - With threads disabled, a compiler barrier is all that is
/// required: there is no other processor to order against.
inline void MemoryFence() {
  CompilerFence();
}

typedef uint32_t cas_flag;

/// CompareAndSwap - Single-threaded compare-and-swap: if *dest equals c,
/// store exc into *dest.  Returns the value *dest held before the call,
/// matching the contract of the threaded implementations below.
inline cas_flag CompareAndSwap(cas_flag* dest, cas_flag exc, cas_flag c) {
  cas_flag result = *dest;
  if (result == c)
    *dest = exc;
  return result;
}
79 #elif defined(__GNUC__)
/// MemoryFence - Full hardware memory barrier for the host architecture.
/// Orders all prior loads/stores before all subsequent ones as seen by
/// other processors.  Falls through to nothing on unlisted architectures.
inline void MemoryFence() {
#  if defined(__i386__) || defined(__x86_64__)
#    if defined(__SSE2__)
  __asm__ __volatile__("mfence" : : : "memory");
#    else
  // Pre-SSE2 x86 has no mfence; a locked xchg to a dummy byte is a
  // full barrier (xchg with memory implies LOCK).
  unsigned char dummy = 0;
  volatile unsigned char* addr = &dummy;
  unsigned char oldval;
  __asm__ __volatile__("xchgb %0, %1" : "=r"(oldval),
  "=m"(*addr) : "0"(0xff), "m"(*addr) : "memory");
#    endif // defined(__SSE2__)
#  elif defined(__ia64__)
  __asm__ __volatile__("mf" : : : "memory");
#  elif defined(__alpha__)
  __asm__ __volatile__("mb" : : : "memory");
#  elif defined(__sparc__)
  __asm__ __volatile__("membar #StoreStore | #LoadStore | #LoadLoad | #StoreLoad");
#  elif defined(__powerpc__) || defined(__ppc__)
  __asm__ __volatile__("sync" : : : "memory");
#  elif defined(__arm__)
  // CP15 c7,c10,5: Data Memory Barrier on ARMv6 (the "dmb" of that era).
  __asm__ __volatile__ ("mcr p15, 0, r0, c7, c10, 5 @ dmb");
#  endif
} // defined(__i386__) || defined(__x86_64__)
// cas_flag is the machine word size on GCC-compiled targets.
typedef unsigned long cas_flag;

// CompareAndSwap - Atomic compare-and-swap via per-architecture inline asm.
// Intended contract (per the single-threaded variant above): if *ptr equals
// old_value, store new_value into *ptr; return the prior contents of *ptr.
//
// NOTE(review): this block appears to have lost source lines throughout —
// the `new_value` parameter, the declarations of `prev`/`result`/`addr`/
// `was_equal`, several asm operand and clobber lines, the return statement,
// and the closing brace are all absent, so it cannot compile as written.
// Restore it from a known-good copy (libatomic_ops or LLVM history) rather
// than patching line by line.
inline cas_flag CompareAndSwap(cas_flag* ptr,
// NOTE(review): a `cas_flag new_value,` parameter line is missing here;
// new_value is referenced in every branch below.
cas_flag old_value) {
// NOTE(review): `cas_flag prev;` (the returned previous value) is missing.
# if defined(__i386__) || defined(__x86_64__)
// x86: LOCK-prefixed cmpxchg; the old value is returned in eax.
__asm__ __volatile__("lock; cmpxchgl %1,%2"
// NOTE(review): the output-operand line (e.g. `: "=a" (prev)`) is missing
// before the input list below.
: "q" (new_value), "m" (*ptr), "0" (old_value)
// NOTE(review): the clobber list and statement terminator are missing.
# elif defined(__ia64__)
// IA64 ILP32: zero-extend the 32-bit pointer (zxt4/addp4) before cmpxchg4.
// NOTE(review): the `#   if defined(_ILP32)` guard line implied by the
// `# endif` below is missing.
__asm__("zxt4 %1=%1": "=r"(prev) : "0"(prev));
__asm__ __volatile__("addp4 %1=0,%1;;\n"
"mov ar.ccv=%[old] ;; cmpxchg 4"
".acq %0=[%1],%[new_val],ar.ccv"
// NOTE(review): the operand lists below are garbled — a constraint list
// cannot start with two output sections, and the asm names %[old]/%[new_val]
// do not match the [old_value]/[new_value] symbolic names declared here.
: "=r"(prev) "1"(addr),
: "=r"(addr), [new_value]"r"(new_value), [old_value]"r"(old_value)
// IA64 LP64 path: 8-byte cmpxchg with acquire semantics.
// NOTE(review): the `#   else` separating the two IA64 paths is missing.
__asm__ __volatile__(
"mov ar.ccv=%[old] ;; cmpxchg 8"
".acq %0=[%1],%[new_val],ar.ccv"
// NOTE(review): the output-operand line for this asm is missing too.
: "r"(ptr), [new_value]"r"(new_value),
[old_value]"r"(old_value)
# endif // defined(_ILP32)
# elif defined(__alpha__)
// Alpha: ldq_l/stq_c load-locked / store-conditional retry loop.
__asm__ __volatile__(
// NOTE(review): the entire asm template (the instruction strings) is
// missing between the open paren and the constraints below.
:"=&r" (prev), "=m" (*ptr), "=&r" (was_equal)
: "r" (new_value), "Ir" (old_value)
#elif defined(__sparc__)
#error No CAS implementation for SPARC yet.
#elif defined(__powerpc__) || defined(__ppc__)
// PowerPC: lwarx/stwcx. reservation loop; result flags success.
__asm__ __volatile__(
"1:lwarx %0,0,%2\n" /* load and reserve */
"cmpw %0, %4\n" /* if load is not equal to */
"bne 2f\n" /* old, fail */
"stwcx. %3,0,%2\n" /* else store conditional */
"bne- 1b\n" /* retry if lost reservation */
"li %1,1\n" /* result = 1; */
// NOTE(review): the failure label/epilogue strings ("2:" etc.) are missing.
: "=&r"(prev), "=&r"(result)
: "r"(ptr), "r"(new_value), "r"(old_value), "1"(result)
#elif defined(__arm__)
// ARM (pre-v6 style): swp-based compare-and-swap emulation loop.
__asm__ __volatile__ (
// NOTE(review): the loop prologue strings (load, compare, branch) are
// missing before the swp below.
"swp %0,%3,[%2] \n\t"
"swpne %1,%0,[%2] \n\t"
: "=&r"(result), "=&r"(prev)
// NOTE(review): `"r" ptr)` below is missing its opening parenthesis; the
// clobbers ("cc", "memory") and closing of the statement are also absent.
: "r" ptr), "r" (new_value), "r" (old_value)
#endif // defined(__i386__)
185 #elif defined(_MSC_VER) && _M_IX86 > 400
186 inline void MemoryFence() {
188 InterlockedExchanged((LONG volatile *)&dummy, (LONG)0);
191 typedef DWORD cas_flag;
192 inline cas_flag CompareAndSwap(cas_flag* ptr,
194 cas_flag old_value) {
195 /* FIXME - This is nearly useless on win64. */
196 /* Use InterlockedCompareExchange64 for win64? */
197 return InterlockedCompareExchange((DWORD volatile *)addr,
198 (DWORD)new_value, (DWORD) old_value)
201 #error No atomics implementation found for your platform.
202 #endif // !defined(ENABLE_THREADS) || ENABLE_THREADS == 0