Merge branch 'x86/mpx' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
[firefly-linux-kernel-4.4.55.git] / arch / x86 / include / asm / uaccess.h
index 6f1bb74d547beed3459f9bdb1eabff5965bc2881..0d592e0a5b84fa3c3738ce8400e34df5bb552eb0 100644 (file)
@@ -533,6 +533,98 @@ extern __must_check long strnlen_user(const char __user *str, long n);
 unsigned long __must_check clear_user(void __user *mem, unsigned long len);
 unsigned long __must_check __clear_user(void __user *mem, unsigned long len);
 
+/*
+ * Deliberately has no definition: any call that survives dead-code
+ * elimination (i.e. an unsupported operand size reached at compile time)
+ * triggers the __compiletime_error() diagnostic below.
+ */
+extern void __cmpxchg_wrong_size(void)
+       __compiletime_error("Bad argument size for cmpxchg");
+
+/*
+ * __user_atomic_cmpxchg_inatomic() - locked cmpxchg on a user-space address.
+ *
+ * @uval: kernel pointer that receives the value observed at *@ptr
+ *        (cmpxchg semantics: equals @old on success, the conflicting
+ *        value on a lost race).  NOTE: written unconditionally, even
+ *        when the access faults and -EFAULT is returned.
+ * @ptr:  user-space address to operate on (caller must have checked
+ *        access_ok(); the unchecked variant is used by the wrapper below).
+ * @old:  expected value.
+ * @new:  replacement value, stored only if *@ptr == @old.
+ * @size: operand size in bytes; must be 1, 2, 4 or 8, anything else is a
+ *        compile-time error via __cmpxchg_wrong_size().
+ *
+ * Returns 0, or -EFAULT if the user access faulted.
+ *
+ * Each size case is the same pattern:
+ *  - ASM_STAC / ASM_CLAC bracket the user access (opens/closes the
+ *    SMAP user-access window on CPUs that have it).
+ *  - LOCK_PREFIX cmpxchg{b,w,l,q} does the atomic compare-exchange;
+ *    %eax/%rax ("=a" output, "1" input) carries @old in and the
+ *    observed value out.
+ *  - _ASM_EXTABLE registers label 1 so a fault jumps to the .fixup
+ *    stub at label 3, which loads -EFAULT ("i" immediate) into the
+ *    return slot and resumes at label 2 (after the CLAC).
+ * Case 1 uses the "q" constraint because cmpxchgb needs a
+ * byte-addressable register; the wider cases can take any "r".
+ */
+#define __user_atomic_cmpxchg_inatomic(uval, ptr, old, new, size)      \
+({                                                                     \
+       int __ret = 0;                                                  \
+       __typeof__(ptr) __uval = (uval);                                \
+       __typeof__(*(ptr)) __old = (old);                               \
+       __typeof__(*(ptr)) __new = (new);                               \
+       switch (size) {                                                 \
+       case 1:                                                         \
+       {                                                               \
+               asm volatile("\t" ASM_STAC "\n"                         \
+                       "1:\t" LOCK_PREFIX "cmpxchgb %4, %2\n"          \
+                       "2:\t" ASM_CLAC "\n"                            \
+                       "\t.section .fixup, \"ax\"\n"                   \
+                       "3:\tmov     %3, %0\n"                          \
+                       "\tjmp     2b\n"                                \
+                       "\t.previous\n"                                 \
+                       _ASM_EXTABLE(1b, 3b)                            \
+                       : "+r" (__ret), "=a" (__old), "+m" (*(ptr))     \
+                       : "i" (-EFAULT), "q" (__new), "1" (__old)       \
+                       : "memory"                                      \
+               );                                                      \
+               break;                                                  \
+       }                                                               \
+       case 2:                                                         \
+       {                                                               \
+               asm volatile("\t" ASM_STAC "\n"                         \
+                       "1:\t" LOCK_PREFIX "cmpxchgw %4, %2\n"          \
+                       "2:\t" ASM_CLAC "\n"                            \
+                       "\t.section .fixup, \"ax\"\n"                   \
+                       "3:\tmov     %3, %0\n"                          \
+                       "\tjmp     2b\n"                                \
+                       "\t.previous\n"                                 \
+                       _ASM_EXTABLE(1b, 3b)                            \
+                       : "+r" (__ret), "=a" (__old), "+m" (*(ptr))     \
+                       : "i" (-EFAULT), "r" (__new), "1" (__old)       \
+                       : "memory"                                      \
+               );                                                      \
+               break;                                                  \
+       }                                                               \
+       case 4:                                                         \
+       {                                                               \
+               asm volatile("\t" ASM_STAC "\n"                         \
+                       "1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"          \
+                       "2:\t" ASM_CLAC "\n"                            \
+                       "\t.section .fixup, \"ax\"\n"                   \
+                       "3:\tmov     %3, %0\n"                          \
+                       "\tjmp     2b\n"                                \
+                       "\t.previous\n"                                 \
+                       _ASM_EXTABLE(1b, 3b)                            \
+                       : "+r" (__ret), "=a" (__old), "+m" (*(ptr))     \
+                       : "i" (-EFAULT), "r" (__new), "1" (__old)       \
+                       : "memory"                                      \
+               );                                                      \
+               break;                                                  \
+       }                                                               \
+       case 8:                                                         \
+       {                                                               \
+               /* cmpxchgq only exists on 64-bit; make size-8 use a   */\
+               /* build error on 32-bit instead of emitting bad asm.  */\
+               if (!IS_ENABLED(CONFIG_X86_64))                         \
+                       __cmpxchg_wrong_size();                         \
+                                                                       \
+               asm volatile("\t" ASM_STAC "\n"                         \
+                       "1:\t" LOCK_PREFIX "cmpxchgq %4, %2\n"          \
+                       "2:\t" ASM_CLAC "\n"                            \
+                       "\t.section .fixup, \"ax\"\n"                   \
+                       "3:\tmov     %3, %0\n"                          \
+                       "\tjmp     2b\n"                                \
+                       "\t.previous\n"                                 \
+                       _ASM_EXTABLE(1b, 3b)                            \
+                       : "+r" (__ret), "=a" (__old), "+m" (*(ptr))     \
+                       : "i" (-EFAULT), "r" (__new), "1" (__old)       \
+                       : "memory"                                      \
+               );                                                      \
+               break;                                                  \
+       }                                                               \
+       default:                                                        \
+               __cmpxchg_wrong_size();                                 \
+       }                                                               \
+       *__uval = __old;                                                \
+       __ret;                                                          \
+})
+
+/*
+ * Checked front-end: verify the user range with access_ok() first,
+ * then do the unchecked atomic cmpxchg; an address that fails the
+ * range check yields -EFAULT without touching user memory.
+ * Operand size is taken from the pointee type of @ptr.
+ */
+#define user_atomic_cmpxchg_inatomic(uval, ptr, old, new)              \
+({                                                                     \
+       access_ok(VERIFY_WRITE, (ptr), sizeof(*(ptr))) ?                \
+               __user_atomic_cmpxchg_inatomic((uval), (ptr),           \
+                               (old), (new), sizeof(*(ptr))) :         \
+               -EFAULT;                                                \
+})
+
 /*
  * movsl can be slow when source and dest are not both 8-byte aligned
  */