return c;
}
-#ifdef BIT64
-static inline INTPTR LOCKXCHG(volatile INTPTR * ptr, INTPTR val){
- INTPTR retval;
+// Atomically swap *ptr with val and return the previous value (32-bit variant).
+static inline int LOCKXCHG32(volatile int* ptr, int val){
+ int retval;
//note: xchgl always implies lock
- __asm__ __volatile__("xchgq %0,%1"
+ __asm__ __volatile__("xchgl %0,%1"
: "=r"(retval)
: "m"(*ptr), "0"(val)
: "memory");
return retval;
}
-#else
-static inline int LOCKXCHG(volatile int* ptr, int val){
- int retval;
+
+#ifdef BIT64
+// Atomically swap *ptr with val and return the previous value (64-bit variant).
+static inline INTPTR LOCKXCHG(volatile INTPTR * ptr, INTPTR val){
+ INTPTR retval;
-//note: xchgl always implies lock
+//note: xchgq (like xchgl) always implies lock with a memory operand
- __asm__ __volatile__("xchgl %0,%1"
+ __asm__ __volatile__("xchgq %0,%1"
: "=r"(retval)
: "m"(*ptr), "0"(val)
: "memory");
return retval;
}
+#else
+#define LOCKXCHG LOCKXCHG32
#endif
/*