/* Reinterpret an arbitrary pointer as (volatile INTPTR *) so it can be used
 * as an inline-asm memory operand (see the CAS implementations below). */
7 #define __xg(x) ((volatile INTPTR *)(x))
/* Compiler-only fence: the empty asm with a "memory" clobber forbids the
 * compiler from reordering memory accesses across this point; it emits no
 * CPU instruction and is NOT a hardware barrier. */
9 #define CFENCE asm volatile("":::"memory");
/* NOTE(review): the two lines below look like the interior of a multi-line
 * LOCK_PREFIX definition (Linux-kernel-style: record the address of each
 * LOCK prefix in a .smp_locks ELF section, using a local label near 661).
 * The head and tail of that macro are on lines not visible in this chunk —
 * confirm against the full file before editing. */
12 ".section .smp_locks,\"a\"\n" \
14 " .long 661f\n" /* address */\
/* Atomically decrement the int at *v using a LOCK-prefixed decl.
 * No return value; callers that need the result use atomic_sub_and_test.
 * NOTE(review): the asm's operand list and the function's closing brace are
 * on lines not visible in this chunk. */
19 static inline void atomic_dec(volatile int *v) {
20 __asm__ __volatile__ (LOCK_PREFIX "decl %0"
/* Atomically increment the int at *v using a LOCK-prefixed incl.
 * Mirror image of atomic_dec above.
 * NOTE(review): operand constraints and closing brace are on lines not
 * visible in this chunk. */
24 static inline void atomic_inc(volatile int *v) {
25 __asm__ __volatile__ (LOCK_PREFIX "incl %0"
/* Atomically compute *v -= i and report whether the result hit zero:
 * sete captures the Z flag into c, so the return is nonzero iff the new
 * value of *v is exactly 0 (the classic refcount-release test).
 * NOTE(review): the declaration of c and the return statement are on lines
 * not visible in this chunk — presumably `unsigned char c;` / `return c;`. */
29 static inline int atomic_sub_and_test(int i, volatile int *v) {
32 __asm__ __volatile__ (LOCK_PREFIX "subl %2,%0; sete %1"
33 : "+m" (*v), "=qm" (c) /* %0: read/write memory word, %1: byte flag result */
34 : "ir" (i) : "memory");
/* Atomically swap the word at *ptr with val (64-bit xchgq variant).
 * NOTE(review): two same-named LOCKXCHG definitions appear in this file;
 * they are presumably selected by hidden word-size #ifdef guards — C has no
 * overloading, so confirm against the full file. Output operands and the
 * return of the old value are on lines not visible here. */
39 static inline INTPTR LOCKXCHG(volatile INTPTR * ptr, INTPTR val){
41 // note: xchg with a memory operand implicitly asserts LOCK, so no explicit prefix is needed
42 __asm__ __volatile__("xchgq %0,%1"
/* Atomically swap the 32-bit int at *ptr with val (xchgl variant of the
 * LOCKXCHG above; presumably the non-64-bit branch of a hidden #ifdef).
 * NOTE(review): operand list and return are on lines not visible here. */
50 static inline int LOCKXCHG(volatile int* ptr, int val){
52 // note: xchg with a memory operand implicitly asserts LOCK, so no explicit prefix is needed
53 __asm__ __volatile__("xchgl %0,%1"
/* One-shot lock attempt via atomic exchange: swaps retval into *lock and
 * returns the lock's previous contents, so the caller can tell whether it
 * acquired the lock. NOTE(review): the initial value of retval and the
 * success/failure convention (0 vs 1) are set on lines not visible in this
 * chunk — confirm against callers before relying on the return sense. */
63 static inline int write_trylock(volatile int *lock) {
65 __asm__ __volatile__("xchgl %0,%1"
67 : "m"(*lock), "0"(retval) /* "0" ties retval to output operand %0 */
/* 64-bit compare-and-swap: if *ptr == old, atomically store new into *ptr;
 * cmpxchgq implicitly compares against rax ("0"(old) below) and leaves the
 * prior memory value in rax. NOTE(review): the "=a" output operand and the
 * return of the previous value are on lines not visible in this chunk —
 * callers presumably test success via (CAS(...) == old). The parameter name
 * `new` confirms this header is C, not C++. */
74 static inline INTPTR CAS(volatile void *ptr, unsigned INTPTR old, unsigned INTPTR new){
76 __asm__ __volatile__("lock; cmpxchgq %1,%2"
78 : "r"(new), "m"(*__xg(ptr)), "0"(old)
/* 32-bit compare-and-swap twin of the CAS above (presumably the non-64-bit
 * branch of a hidden #ifdef): cmpxchgl with %k1 forcing the 32-bit register
 * name for operand 1; compare value `old` rides in via "0"(old) (eax).
 * NOTE(review): output operand and return statement are on lines not
 * visible in this chunk. */
83 static inline long CAS(volatile void *ptr, unsigned long old, unsigned long new){
85 __asm__ __volatile__("lock; cmpxchgl %k1,%2"
87 : "r"(new), "m"(*__xg(ptr)), "0"(old)
93 static inline int BARRIER(){