/* Branch-prediction hints.  The !!(x) normalizes any scalar to 0/1 so the
 * hint works for pointers and non-boolean integers too. */
#define likely(x)   __builtin_expect(!!(x), 1)
#define unlikely(x) __builtin_expect(!!(x), 0)

/* Unlocked value of the xchg-based ("swap") lock. */
#define SWAP_LOCK_BIAS 1

/* Compiler-only barrier: forbids the compiler from reordering memory
 * accesses across it (no CPU fence is emitted).  __asm__/__volatile__
 * spellings keep this valid under strict -std=c11 (plain `asm` is a GNU
 * extension there).  Note the macro supplies its own trailing `;`. */
#define CFENCE __asm__ __volatile__("":::"memory");

/* Unlocked value of the reader-writer lock: a writer claims the whole
 * bias, each reader subtracts 1, so the count stays positive while only
 * readers hold the lock (Linux x86 rwlock scheme). */
#define RW_LOCK_BIAS 0x01000000
13 ".section .smp_locks,\"a\"\n" \
15 " .long 661f\n" /* address */\
/*
 * Legacy xchg-based ("swap") lock initializer.  Superseded by the
 * RW_LOCK_BIAS-based initdsmlocks() below; leaving it compiled in would
 * both redefine that symbol and rely on an implicit-int return type
 * (invalid since C99).  Kept here as reference only.
 *
 * static inline void initdsmlocks(volatile unsigned int *addr) {
 *     (*addr) = SWAP_LOCK_BIAS;
 * }
 */
//int write_trylock(volatile unsigned int *lock);
//void write_unlock(volatile unsigned int *lock);
/*
 * Initialize a DSM reader-writer lock to its unlocked state.
 * RW_LOCK_BIAS means "no writer, no readers".  Plain store: callers must
 * not race with lock users during initialization.
 */
static inline void initdsmlocks(volatile unsigned int *addr) {
    (*addr) = RW_LOCK_BIAS;
}
/*
 * Try to acquire the xchg-based swap lock.
 * Atomically exchanges 0 into *lock; xchg with a memory operand is
 * implicitly locked on x86, so no LOCK_PREFIX is needed.
 * Returns the previous value: nonzero (SWAP_LOCK_BIAS) when the lock was
 * free and is now held by the caller, 0 when it was already held.
 */
static inline int write_trylock(volatile unsigned int *lock) {
    int retval = 0;
    __asm__ __volatile__("xchgl %0,%1"
                         : "+r" (retval), "+m" (*lock)
                         :
                         : "memory");
    return retval;
}
/*
 * Release the swap lock by storing the unlocked value (SWAP_LOCK_BIAS == 1).
 * A plain aligned 32-bit store is atomic on x86; the "memory" clobber keeps
 * the compiler from sinking critical-section accesses past the release.
 */
static inline void write_unlock(volatile unsigned int *lock) {
    __asm__ __volatile__("movl $1, %0" : "+m" (*lock)::"memory");
}
/* Atomically add i to *v ("lock addl"); i may be negative. */
static inline void atomic_add(int i, volatile unsigned int *v) {
    __asm__ __volatile__ (LOCK_PREFIX "addl %1,%0"
                          : "+m" (*v)
                          : "ir" (i)
                          : "memory");
}
/* Drop a read lock: atomically return the reader's 1 to the counter. */
static inline void rwread_unlock(volatile unsigned int *rw) {
    __asm__ __volatile__ (LOCK_PREFIX "incl %0" : "+m" (*rw) : : "memory");
}
/* Drop the write lock: atomically restore the full RW_LOCK_BIAS the writer
 * removed, returning the counter to the unlocked value. */
static inline void rwwrite_unlock(volatile unsigned int *rw) {
    __asm__ __volatile__ (LOCK_PREFIX "addl %1, %0"
                          : "+m" (*rw) : "i" (RW_LOCK_BIAS) : "memory");
}
/* Downgrade-release for a converted lock: a reader that upgraded to writer
 * removed RW_LOCK_BIAS-1 (its own 1 was already gone); adding it back takes
 * the counter from 0 (write-held) straight to RW_LOCK_BIAS (unlocked). */
static inline void rwconvert_unlock(volatile unsigned int *rw) {
    __asm__ __volatile__ (LOCK_PREFIX "addl %1, %0"
                          : "+m" (*rw) : "i" (RW_LOCK_BIAS-1) : "memory");
}
/* Atomically decrement *v by 1 ("lock decl"). */
static inline void atomic_dec(volatile unsigned int *v) {
    __asm__ __volatile__ (LOCK_PREFIX "decl %0"
                          : "+m" (*v) : : "memory");
}
/* Atomically increment *v by 1 ("lock incl"). */
static inline void atomic_inc(volatile unsigned int *v) {
    __asm__ __volatile__ (LOCK_PREFIX "incl %0"
                          : "+m" (*v) : : "memory");
}
/*
 * Atomically subtract i from *v and test the result.
 * Returns 1 if the new value is exactly 0, otherwise 0
 * (sete captures the Zero Flag set by the locked subl).
 */
static inline int atomic_sub_and_test(int i, volatile unsigned int *v) {
    unsigned char c;
    __asm__ __volatile__ (LOCK_PREFIX "subl %2,%0; sete %1"
                          : "+m" (*v), "=qm" (c)
                          : "ir" (i) : "memory");
    return c;
}
/*
 * One-shot compare-and-swap used to take the write lock:
 * if *ptr == RW_LOCK_BIAS (completely unlocked), atomically set it to 0
 * (write-held).  On failure *ptr is left unchanged.
 * Returns 1 on success, 0 otherwise.
 */
static inline unsigned long cas(volatile unsigned int* ptr) {
    unsigned int prev;
    /* cmpxchg: compare eax (RW_LOCK_BIAS) with *ptr; if equal store the
     * new value (0); eax receives the old *ptr either way. */
    __asm__ __volatile__("lock;"
                         "cmpxchgl %2,%1"
                         : "=a"(prev), "+m"(*ptr)
                         : "r"(0), "0"(RW_LOCK_BIAS)
                         : "memory");
    return prev==RW_LOCK_BIAS;
}
/* Plain (non-atomic) read of the lock word; the argument is parenthesized
 * so expressions like atomic_read(p + 1) expand correctly. */
#define atomic_read(v) (*(v))
/*
 * Try to take a read lock.  Speculatively decrement the counter; it stays
 * non-negative while only readers hold the lock (a writer removes the
 * whole RW_LOCK_BIAS, driving it to zero or below).
 * Returns 1 on success, 0 on failure (the decrement is rolled back).
 */
static inline int rwread_trylock(volatile unsigned int *lock) {
    atomic_dec(lock);
    /* Cast to signed before the comparison: *lock is unsigned, so a bare
     * ">= 0" test would always be true and a writer would never be seen. */
    if (likely((int)atomic_read(lock) >= 0))
        return 1; //can aquire a new read lock
    atomic_inc(lock);   /* undo the speculative decrement */
    return 0;
}
/*
 * Try to take the write lock with a single CAS of RW_LOCK_BIAS -> 0,
 * which succeeds only when no reader or writer holds the lock.
 * Returns 1 on success, 0 on failure.
 */
static inline int rwwrite_trylock(volatile unsigned int *lock) {
    if (likely(cas(lock))) {
        return 1; // got the write lock
    }
    // if (likely(atomic_sub_and_test(RW_LOCK_BIAS, lock))) {
    //     return 1; // get a write lock
    /*
     * NOTE(review): cas() leaves *lock untouched on failure, so no
     * roll-back is needed here.  The previous atomic_add(RW_LOCK_BIAS,
     * lock) on this path belonged to the commented-out subtract-based
     * variant above and would have corrupted the lock counter.
     */
    return 0; // failed to acquire a write lock
}
/*
 * Try to upgrade a held read lock to a write lock.  The caller's read
 * lock accounts for 1, so subtracting RW_LOCK_BIAS-1 reaches exactly 0
 * only when the caller is the sole lock holder; 0 means write-held.
 * Returns 1 on success, 0 on failure (the subtraction is rolled back,
 * leaving the caller's read lock intact).
 */
static inline int rwconvert_trylock(volatile unsigned int *lock) {
    if (likely(atomic_sub_and_test((RW_LOCK_BIAS-1), lock))) {
        return 1; // get a write lock
    }
    atomic_add((RW_LOCK_BIAS-1), lock);   /* undo: other readers/writer present */
    return 0; // failed to acquire a write lock
}