/* Cast an arbitrary pointer to (volatile long *) so it can be used as an
 * asm memory operand by the CAS32/CAS64 wrappers below. */
7 #define __xg(x) ((volatile long *)(x))
/* Compiler-only fence: the empty asm with a "memory" clobber stops the
 * compiler from caching/reordering memory accesses across this point.
 * It emits no CPU instruction, so it provides no hardware ordering. */
9 #define CFENCE asm volatile("":::"memory");
12 ".section .smp_locks,\"a\"\n" \
14 " .long 661f\n" /* address */\
/* Atomically decrement *v via x86 `lock decl`.
 * NOTE(review): the constraint list and closing lines of this function are
 * outside this view -- confirm %0 is a "+m"-style operand on *v. */
19 static inline void atomic_dec(volatile int *v) {
20 __asm__ __volatile__ (LOCK_PREFIX "decl %0"
/* Atomically increment *v via x86 `lock incl`.
 * NOTE(review): the constraint list and closing lines of this function are
 * outside this view -- confirm %0 is a "+m"-style operand on *v. */
24 static inline void atomic_inc(volatile int *v) {
25 __asm__ __volatile__ (LOCK_PREFIX "incl %0"
/* Atomically subtract i from *v and report whether the result became zero:
 * `lock subl` performs the subtraction, then `sete` captures ZF into c
 * ("=qm" allows a byte register or memory for the flag).
 * Presumably returns c (its declaration and the return statement are
 * outside this view -- TODO confirm). */
29 static inline int atomic_sub_and_test(int i, volatile int *v) {
32 __asm__ __volatile__ (LOCK_PREFIX "subl %2,%0; sete %1"
33 : "+m" (*v), "=qm" (c)
34 : "ir" (i) : "memory");
/* Atomically exchange *ptr with val.
 * Presumably returns the previous value of *ptr (the output constraints
 * and return statement are outside this view -- TODO confirm). */
38 static inline int LOCKXCHG(volatile int* ptr, int val){
/* x86 xchg with a memory operand asserts the bus lock implicitly, so no
 * explicit LOCK_PREFIX is needed on the next instruction. */
41 //note: xchgl always implies lock
42 __asm__ __volatile__("xchgl %0,%1"
/* Single-shot write-lock attempt implemented as one xchgl on *lock.
 * "0"(retval) seeds asm operand 0 with retval before the exchange; the
 * output constraint line and the return statement are outside this view,
 * so the exact success/failure encoding cannot be confirmed here --
 * TODO verify against the full definition. */
51 static inline int write_trylock(volatile int *lock) {
53 __asm__ __volatile__("xchgl %0,%1"
55 : "m"(*lock), "0"(retval)
/* 32-bit compare-and-swap: if *mem == cmp, store val into *mem.
 * cmpxchgl compares EAX (tied to cmp via "0"(cmp)) with *mem; by cmpxchg
 * semantics the previous value of *mem is left in EAX. The output operand
 * line is outside this view -- presumably that previous value is what is
 * returned (TODO confirm). */
61 static inline int CAS(volatile int* mem, int cmp, int val){
63 asm volatile ("lock; cmpxchgl %1, %2"
65 : "r" (val), "m" (*(mem)), "0"(cmp)
/* CAS on 32 bits through a type-erased pointer; %k1 forces the 32-bit
 * subregister of the `new` operand.
 * NOTE(review): parameters are unsigned long, but cmpxchgl compares and
 * stores only 32 bits -- on LP64 targets the upper 32 bits of old/new are
 * silently ignored. Confirm callers pass only 32-bit values. */
70 static inline long CAS32(volatile void *ptr, unsigned long old, unsigned long new){
72 __asm__ __volatile__("lock; cmpxchgl %k1,%2"
74 : "r"(new), "m"(*__xg(ptr)), "0"(old)
/* 64-bit compare-and-swap (cmpxchgq, x86-64 only): if *ptr == old, store
 * new into *ptr. old is tied to RAX via "0"(old); prev is declared to
 * receive the previous value of *ptr, though the output operand line and
 * the return statement are outside this view -- TODO confirm prev is
 * bound as "=a" and returned. */
79 static inline long long CAS64(volatile void *ptr, unsigned long long old, unsigned long long new){
80 unsigned long long prev;
81 __asm__ __volatile__(LOCK_PREFIX "cmpxchgq %1,%2"
83 : "r"(new), "m"(*__xg(ptr)), "0"(old)
/* Memory barrier helper. Body is outside this view -- cannot tell from
 * here whether it is a compiler-only fence (like CFENCE above) or a
 * hardware fence (mfence/locked op); verify against the full file. */
88 static inline int BARRIER(){