From 67a35a73ee90842b8f8a6b71ff088ca94fc57e21 Mon Sep 17 00:00:00 2001
From: bdemsky
Date: Thu, 3 Dec 2009 01:06:44 +0000
Subject: [PATCH] fix read lock bug

---
 Robust/src/Runtime/STM/stmlock.c |  4 ++--
 Robust/src/Runtime/STM/stmlock.h | 28 ++++++++++++++--------------
 2 files changed, 16 insertions(+), 16 deletions(-)

diff --git a/Robust/src/Runtime/STM/stmlock.c b/Robust/src/Runtime/STM/stmlock.c
index 0647aed3..d8862db7 100644
--- a/Robust/src/Runtime/STM/stmlock.c
+++ b/Robust/src/Runtime/STM/stmlock.c
@@ -3,7 +3,7 @@
 
 /*
-int write_trylock(volatile unsigned int *lock) {
+int write_trylock(volatile int *lock) {
   int retval=0;
   __asm__ __volatile__("xchgl %0,%1"
                        : "=r"(retval)
@@ -13,7 +13,7 @@ int write_trylock(volatile unsigned int *lock) {
 }
 
-void write_unlock(volatile unsigned int *lock) {
+void write_unlock(volatile int *lock) {
   __asm __volatile__("movl $1, %0" : "+m" (*lock)::"memory");
 }
 */
diff --git a/Robust/src/Runtime/STM/stmlock.h b/Robust/src/Runtime/STM/stmlock.h
index d74d2e37..a22f96fa 100644
--- a/Robust/src/Runtime/STM/stmlock.h
+++ b/Robust/src/Runtime/STM/stmlock.h
@@ -16,7 +16,7 @@
   ".previous\n" \
   "661:\n\tlock; "
 
-static inline initdsmlocks(volatile unsigned int *addr) {
+static inline initdsmlocks(volatile int *addr) {
   (*addr) = SWAP_LOCK_BIAS;
 }
 //int write_trylock(volatile unsigned int *lock);
@@ -28,7 +28,7 @@ static inline void initdsmlocks(volatile unsigned int *addr) {
 }
 */
 
-static inline int write_trylock(volatile unsigned int *lock) {
+static inline int write_trylock(volatile int *lock) {
   int retval=0;
   __asm__ __volatile__("xchgl %0,%1"
                        : "=r"(retval)
@@ -37,42 +37,42 @@ static inline int write_trylock(volatile unsigned int *lock) {
   return retval;
 }
 
-static inline void write_unlock(volatile unsigned int *lock) {
+static inline void write_unlock(volatile int *lock) {
   __asm__ __volatile__("movl $1, %0" : "+m" (*lock)::"memory");
 }
 
-static inline void atomic_add(int i, volatile unsigned int *v) {
+static inline void atomic_add(int i, volatile int *v) {
   __asm__ __volatile__ (LOCK_PREFIX "addl %1,%0"
                         : "+m" (*v)
                         : "ir" (i));
 }
 
-static inline void rwread_unlock(volatile unsigned int *rw) {
+static inline void rwread_unlock(volatile int *rw) {
   __asm__ __volatile__ (LOCK_PREFIX "incl %0"
                         : "+m" (*rw) : : "memory");
 }
 
-static inline void rwwrite_unlock(volatile unsigned int *rw) {
+static inline void rwwrite_unlock(volatile int *rw) {
   __asm__ __volatile__ (LOCK_PREFIX "addl %1, %0"
                         : "+m" (*rw) : "i" (RW_LOCK_BIAS) : "memory");
 }
 
-static inline void rwconvert_unlock(volatile unsigned int *rw) {
+static inline void rwconvert_unlock(volatile int *rw) {
   __asm__ __volatile__ (LOCK_PREFIX "addl %1, %0"
                         : "+m" (*rw) : "i" (RW_LOCK_BIAS-1) : "memory");
 }
 
-static inline void atomic_dec(volatile unsigned int *v) {
+static inline void atomic_dec(volatile int *v) {
   __asm__ __volatile__ (LOCK_PREFIX "decl %0"
                         : "+m" (*v));
 }
 
-static inline void atomic_inc(volatile unsigned int *v) {
+static inline void atomic_inc(volatile int *v) {
   __asm__ __volatile__ (LOCK_PREFIX "incl %0"
                         : "+m" (*v));
 }
 
-static inline int atomic_sub_and_test(int i, volatile unsigned int *v) {
+static inline int atomic_sub_and_test(int i, volatile int *v) {
   unsigned char c;
   __asm__ __volatile__ (LOCK_PREFIX "subl %2,%0; sete %1"
@@ -81,9 +81,9 @@ static inline int atomic_sub_and_test(int i, volatile unsigned int *v) {
   return c;
 }
 
-static inline int rwwrite_trylock(volatile unsigned int *ptr) {
+static inline int rwwrite_trylock(volatile int *ptr) {
 //static inline unsigned long cas(volatile unsigned int* ptr)
 {
-  unsigned int prev;
+  int prev;
   __asm__ __volatile__("lock;" "cmpxchgl %1, %2;"
                        : "=a"(prev)
@@ -95,7 +95,7 @@ static inline int rwwrite_trylock(volatile unsigned int *ptr) {
 
 #define atomic_read(v) (*v)
 
-static inline int rwread_trylock(volatile unsigned int *lock) {
+static inline int rwread_trylock(volatile int *lock) {
   atomic_dec(lock);
   if (likely(atomic_read(lock) >=0 ))
     return 1; //can aquire a new read lock
@@ -111,7 +111,7 @@ static inline int rwread_trylock(volatile unsigned int *lock) {
 //	return 0; // failed to acquire a write lock
 //}
 
-static inline int rwconvert_trylock(volatile unsigned int *lock) {
+static inline int rwconvert_trylock(volatile int *lock) {
   if (likely(atomic_sub_and_test((RW_LOCK_BIAS-1), lock))) {
     return 1; // get a write lock
   }
-- 
2.34.1
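
Why the unsigned-to-signed change fixes the read lock: rwread_trylock decrements
the counter and then tests atomic_read(lock) >= 0. With a volatile unsigned int
counter that comparison is always true, so a reader would "acquire" the lock even
while a writer held it (the counter having wrapped below zero after the
decrement). Making the counter a signed int lets the test actually fail. Below is
a minimal, self-contained sketch of the same reader-count protocol; it is not
code from this tree. It uses GCC/Clang __atomic builtins in place of the x86
inline asm, it fuses the decrement and the re-read into one atomic operation
(the patched code performs them as two separate steps), and the *_sketch names,
the RW_LOCK_BIAS value, and the main driver are invented for illustration.

/* Sketch only: portable restatement of the patched reader-count logic. */
#include <stdio.h>

#define RW_LOCK_BIAS 0x01000000   /* assumed value; a writer takes the whole bias */

/* The counter must be SIGNED so the "went negative" test below can fail;
 * that is exactly the bug this patch fixes. */
static volatile int lock = RW_LOCK_BIAS;

static int rwread_trylock_sketch(volatile int *l) {
  /* One atomic sub-and-fetch stands in for the patch's atomic_dec + atomic_read. */
  if (__atomic_sub_fetch(l, 1, __ATOMIC_ACQUIRE) >= 0)
    return 1;                                  /* acquired a read lock */
  __atomic_add_fetch(l, 1, __ATOMIC_RELAXED);  /* undo the decrement and fail */
  return 0;
}

static void rwread_unlock_sketch(volatile int *l) {
  __atomic_add_fetch(l, 1, __ATOMIC_RELEASE);  /* mirrors rwread_unlock's lock; incl */
}

int main(void) {
  if (rwread_trylock_sketch(&lock)) {
    printf("read lock acquired, counter now %d\n", lock);
    rwread_unlock_sketch(&lock);
  }
  return 0;
}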