8 #define CFENCE asm volatile("":::"memory");
/*
 * LOCK_PREFIX: emits the x86 `lock` prefix and records the address of the
 * locked instruction in the .smp_locks ELF section, so a uniprocessor build
 * can patch the lock prefixes out at startup (same technique as the Linux
 * kernel "alternatives" mechanism).  The numeric label 661 is local to each
 * asm statement, so the macro may be used any number of times.
 */
#define LOCK_PREFIX \
    ".section .smp_locks,\"a\"\n" \
    "  .align 4\n" \
    "  .long 661f\n" /* address of the locked instruction */ \
    ".previous\n" \
    "661:\n\tlock; "
18 static inline void atomic_dec(volatile int *v) {
19 __asm__ __volatile__ (LOCK_PREFIX "decl %0"
23 static inline void atomic_inc(volatile int *v) {
24 __asm__ __volatile__ (LOCK_PREFIX "incl %0"
28 static inline int atomic_sub_and_test(int i, volatile int *v) {
31 __asm__ __volatile__ (LOCK_PREFIX "subl %2,%0; sete %1"
32 : "+m" (*v), "=qm" (c)
33 : "ir" (i) : "memory");
/*
 * Atomically store val into *ptr and return the previous value.
 * note: xchgl with a memory operand always implies lock, so no explicit
 * LOCK_PREFIX is needed.  *ptr is declared "+m" (read-write) because the
 * exchange both reads and writes the memory operand.
 */
static inline int LOCKXCHG(volatile int* ptr, int val){
    int old;
    __asm__ __volatile__("xchgl %0,%1"
        : "=r" (old), "+m" (*ptr)
        : "0" (val)
        : "memory");
    return old;
}
/*
 * Try to acquire the spin-lock word *lock without blocking.
 * Atomically exchanges 1 into *lock; returns 1 when the lock was free
 * (old value 0) and is now held by the caller, 0 when it was already
 * taken.  NOTE(review): success convention (nonzero-on-acquire) chosen to
 * match the usual trylock idiom — confirm against callers.
 *
 * Fix: the original listed *lock as an "m" *input* operand even though
 * xchgl writes it; it is now a "+m" read-write output so the compiler
 * knows the memory is modified.
 */
static inline int write_trylock(volatile int *lock) {
    int old = 1;
    __asm__ __volatile__("xchgl %0,%1"
        : "+r" (old), "+m" (*lock)
        :
        : "memory");
    return old == 0;
}
60 static inline int CAS(volatile int* mem, int cmp, int val){
62 asm volatile ("lock; cmpxchgl %1, %2"
64 : "r" (val), "m" (*(mem)), "0"(cmp)
69 static inline int BARRIER(){