Revert "FROMLIST: arm64: Factor out PAN enabling/disabling into separate uaccess_{enable,disable} macros"
authorSami Tolvanen <samitolvanen@google.com>
Wed, 14 Dec 2016 20:32:56 +0000 (12:32 -0800)
committerAmit Pundir <amit.pundir@linaro.org>
Mon, 16 Jan 2017 09:12:37 +0000 (14:42 +0530)
This reverts commit 23368b642deb01ac6ce668ec1dedfcc0cab25c71.

Bug: 31432001
Change-Id: Ia59e5fc75ef905b89d5f9194f1e762c1e5eff5bf
Signed-off-by: Sami Tolvanen <samitolvanen@google.com>
arch/arm64/include/asm/futex.h
arch/arm64/include/asm/uaccess.h
arch/arm64/kernel/armv8_deprecated.c
arch/arm64/lib/clear_user.S
arch/arm64/lib/copy_from_user.S
arch/arm64/lib/copy_in_user.S
arch/arm64/lib/copy_to_user.S

index 71dfa3b4231364363e08bd474ea474cc7fffd89c..f2585cdd32c29832566718e99d7b5fd9c61d2322 100644 (file)
@@ -27,9 +27,9 @@
 #include <asm/sysreg.h>
 
 #define __futex_atomic_op(insn, ret, oldval, uaddr, tmp, oparg)                \
-do {                                                                   \
-       uaccess_enable();                                               \
        asm volatile(                                                   \
+       ALTERNATIVE("nop", SET_PSTATE_PAN(0), ARM64_HAS_PAN,            \
+                   CONFIG_ARM64_PAN)                                   \
 "      prfm    pstl1strm, %2\n"                                        \
 "1:    ldxr    %w1, %2\n"                                              \
        insn "\n"                                                       \
@@ -44,11 +44,11 @@ do {                                                                        \
 "      .popsection\n"                                                  \
        _ASM_EXTABLE(1b, 4b)                                            \
        _ASM_EXTABLE(2b, 4b)                                            \
+       ALTERNATIVE("nop", SET_PSTATE_PAN(1), ARM64_HAS_PAN,            \
+                   CONFIG_ARM64_PAN)                                   \
        : "=&r" (ret), "=&r" (oldval), "+Q" (*uaddr), "=&r" (tmp)       \
        : "r" (oparg), "Ir" (-EFAULT)                                   \
-       : "memory");                                                    \
-       uaccess_disable();                                              \
-} while (0)
+       : "memory")
 
 static inline int
 futex_atomic_op_inuser (int encoded_op, u32 __user *uaddr)
@@ -118,8 +118,8 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                return -EFAULT;
 
-       uaccess_enable();
        asm volatile("// futex_atomic_cmpxchg_inatomic\n"
+ALTERNATIVE("nop", SET_PSTATE_PAN(0), ARM64_HAS_PAN, CONFIG_ARM64_PAN)
 "      prfm    pstl1strm, %2\n"
 "1:    ldxr    %w1, %2\n"
 "      sub     %w3, %w1, %w4\n"
@@ -134,10 +134,10 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
 "      .popsection\n"
        _ASM_EXTABLE(1b, 4b)
        _ASM_EXTABLE(2b, 4b)
+ALTERNATIVE("nop", SET_PSTATE_PAN(1), ARM64_HAS_PAN, CONFIG_ARM64_PAN)
        : "+r" (ret), "=&r" (val), "+Q" (*uaddr), "=&r" (tmp)
        : "r" (oldval), "r" (newval), "Ir" (-EFAULT)
        : "memory");
-       uaccess_disable();
 
        *uval = val;
        return ret;
index c8ef22a9a83bfb7d543c8ae9462e9ca0379db05d..c3d445b42351e1a529d94c4ba44e9c4d37544821 100644 (file)
@@ -18,8 +18,6 @@
 #ifndef __ASM_UACCESS_H
 #define __ASM_UACCESS_H
 
-#ifndef __ASSEMBLY__
-
 /*
  * User space memory access functions
  */
@@ -125,44 +123,6 @@ static inline void set_fs(mm_segment_t fs)
        "       .long           (" #from " - .), (" #to " - .)\n"       \
        "       .popsection\n"
 
-/*
- * User access enabling/disabling.
- */
-#define __uaccess_disable(alt)                                         \
-do {                                                                   \
-       asm(ALTERNATIVE("nop", SET_PSTATE_PAN(1), alt,                  \
-                       CONFIG_ARM64_PAN));                             \
-} while (0)
-
-#define __uaccess_enable(alt)                                          \
-do {                                                                   \
-       asm(ALTERNATIVE("nop", SET_PSTATE_PAN(0), alt,                  \
-                       CONFIG_ARM64_PAN));                             \
-} while (0)
-
-static inline void uaccess_disable(void)
-{
-       __uaccess_disable(ARM64_HAS_PAN);
-}
-
-static inline void uaccess_enable(void)
-{
-       __uaccess_enable(ARM64_HAS_PAN);
-}
-
-/*
- * These functions are no-ops when UAO is present.
- */
-static inline void uaccess_disable_not_uao(void)
-{
-       __uaccess_disable(ARM64_ALT_PAN_NOT_UAO);
-}
-
-static inline void uaccess_enable_not_uao(void)
-{
-       __uaccess_enable(ARM64_ALT_PAN_NOT_UAO);
-}
-
 /*
  * The "__xxx" versions of the user access functions do not verify the address
  * space - it must have been done previously with a separate "access_ok()"
@@ -190,7 +150,8 @@ static inline void uaccess_enable_not_uao(void)
 do {                                                                   \
        unsigned long __gu_val;                                         \
        __chk_user_ptr(ptr);                                            \
-       uaccess_enable_not_uao();                                       \
+       asm(ALTERNATIVE("nop", SET_PSTATE_PAN(0), ARM64_ALT_PAN_NOT_UAO,\
+                       CONFIG_ARM64_PAN));                             \
        switch (sizeof(*(ptr))) {                                       \
        case 1:                                                         \
                __get_user_asm("ldrb", "ldtrb", "%w", __gu_val, (ptr),  \
@@ -211,8 +172,9 @@ do {                                                                        \
        default:                                                        \
                BUILD_BUG();                                            \
        }                                                               \
-       uaccess_disable_not_uao();                                      \
        (x) = (__force __typeof__(*(ptr)))__gu_val;                     \
+       asm(ALTERNATIVE("nop", SET_PSTATE_PAN(1), ARM64_ALT_PAN_NOT_UAO,\
+                       CONFIG_ARM64_PAN));                             \
 } while (0)
 
 #define __get_user(x, ptr)                                             \
@@ -257,7 +219,8 @@ do {                                                                        \
 do {                                                                   \
        __typeof__(*(ptr)) __pu_val = (x);                              \
        __chk_user_ptr(ptr);                                            \
-       uaccess_enable_not_uao();                                       \
+       asm(ALTERNATIVE("nop", SET_PSTATE_PAN(0), ARM64_ALT_PAN_NOT_UAO,\
+                       CONFIG_ARM64_PAN));                             \
        switch (sizeof(*(ptr))) {                                       \
        case 1:                                                         \
                __put_user_asm("strb", "sttrb", "%w", __pu_val, (ptr),  \
@@ -278,7 +241,8 @@ do {                                                                        \
        default:                                                        \
                BUILD_BUG();                                            \
        }                                                               \
-       uaccess_disable_not_uao();                                      \
+       asm(ALTERNATIVE("nop", SET_PSTATE_PAN(1), ARM64_ALT_PAN_NOT_UAO,\
+                       CONFIG_ARM64_PAN));                             \
 } while (0)
 
 #define __put_user(x, ptr)                                             \
@@ -363,31 +327,4 @@ extern long strncpy_from_user(char *dest, const char __user *src, long count);
 extern __must_check long strlen_user(const char __user *str);
 extern __must_check long strnlen_user(const char __user *str, long n);
 
-#else  /* __ASSEMBLY__ */
-
-#include <asm/alternative.h>
-#include <asm/assembler.h>
-
-/*
- * User access enabling/disabling macros. These are no-ops when UAO is
- * present.
- */
-       .macro  uaccess_disable_not_uao, tmp1
-alternative_if_not ARM64_ALT_PAN_NOT_UAO
-       nop
-alternative_else
-       SET_PSTATE_PAN(1)
-alternative_endif
-       .endm
-
-       .macro  uaccess_enable_not_uao, tmp1, tmp2
-alternative_if_not ARM64_ALT_PAN_NOT_UAO
-       nop
-alternative_else
-       SET_PSTATE_PAN(0)
-alternative_endif
-       .endm
-
-#endif /* __ASSEMBLY__ */
-
 #endif /* __ASM_UACCESS_H */
index a0a0f2b20608bad8b9e92e23102c0a93913dc388..29348947652985e1556e4c7225cbfd17774a4d87 100644 (file)
@@ -281,9 +281,9 @@ static void __init register_insn_emulation_sysctl(struct ctl_table *table)
  * Error-checking SWP macros implemented using ldxr{b}/stxr{b}
  */
 #define __user_swpX_asm(data, addr, res, temp, B)              \
-do {                                                           \
-       uaccess_enable();                                       \
        __asm__ __volatile__(                                   \
+       ALTERNATIVE("nop", SET_PSTATE_PAN(0), ARM64_HAS_PAN,    \
+                   CONFIG_ARM64_PAN)                           \
        "0:     ldxr"B"         %w2, [%3]\n"                    \
        "1:     stxr"B"         %w0, %w1, [%3]\n"               \
        "       cbz             %w0, 2f\n"                      \
@@ -299,11 +299,11 @@ do {                                                              \
        "       .popsection"                                    \
        _ASM_EXTABLE(0b, 4b)                                    \
        _ASM_EXTABLE(1b, 4b)                                    \
+       ALTERNATIVE("nop", SET_PSTATE_PAN(1), ARM64_HAS_PAN,    \
+               CONFIG_ARM64_PAN)                               \
        : "=&r" (res), "+r" (data), "=&r" (temp)                \
        : "r" (addr), "i" (-EAGAIN), "i" (-EFAULT)              \
-       : "memory");                                            \
-       uaccess_disable();                                      \
-} while (0)
+       : "memory")
 
 #define __user_swp_asm(data, addr, res, temp) \
        __user_swpX_asm(data, addr, res, temp, "")
index 08b5f18ba604f99461f1c879bbecef8cdb1f53c5..5d1cad3ce6d601aa474ae9c9b8ef4c76a785912e 100644 (file)
  */
 #include <linux/linkage.h>
 
+#include <asm/alternative.h>
 #include <asm/assembler.h>
 #include <asm/cpufeature.h>
 #include <asm/sysreg.h>
-#include <asm/uaccess.h>
 
        .text
 
@@ -33,7 +33,8 @@
  * Alignment fixed up by hardware.
  */
 ENTRY(__clear_user)
-       uaccess_enable_not_uao x2, x3
+ALTERNATIVE("nop", __stringify(SET_PSTATE_PAN(0)), ARM64_ALT_PAN_NOT_UAO, \
+           CONFIG_ARM64_PAN)
        mov     x2, x1                  // save the size for fixup return
        subs    x1, x1, #8
        b.mi    2f
@@ -53,7 +54,8 @@ uao_user_alternative 9f, strh, sttrh, wzr, x0, 2
        b.mi    5f
 uao_user_alternative 9f, strb, sttrb, wzr, x0, 0
 5:     mov     x0, #0
-       uaccess_disable_not_uao x2
+ALTERNATIVE("nop", __stringify(SET_PSTATE_PAN(1)), ARM64_ALT_PAN_NOT_UAO, \
+           CONFIG_ARM64_PAN)
        ret
 ENDPROC(__clear_user)
 
index 6505ec81f1da18ac52427db04821259135c7fbd2..0b90497d4424c59d0a9ce2dcf5642012f452d3c8 100644 (file)
 
 #include <linux/linkage.h>
 
+#include <asm/alternative.h>
 #include <asm/assembler.h>
 #include <asm/cache.h>
 #include <asm/cpufeature.h>
 #include <asm/sysreg.h>
-#include <asm/uaccess.h>
 
 /*
  * Copy from user space to a kernel buffer (alignment handled by the hardware)
 
 end    .req    x5
 ENTRY(__arch_copy_from_user)
-       uaccess_enable_not_uao x3, x4
+ALTERNATIVE("nop", __stringify(SET_PSTATE_PAN(0)), ARM64_ALT_PAN_NOT_UAO, \
+           CONFIG_ARM64_PAN)
        add     end, x0, x2
 #include "copy_template.S"
-       uaccess_disable_not_uao x3
+ALTERNATIVE("nop", __stringify(SET_PSTATE_PAN(1)), ARM64_ALT_PAN_NOT_UAO, \
+           CONFIG_ARM64_PAN)
        mov     x0, #0                          // Nothing to copy
        ret
 ENDPROC(__arch_copy_from_user)
index 9b04ff3ab6101d29ef8b6be48453260f9d3ffced..f7292dd08c840f27d39874fe7cc08aa89bdfb66d 100644 (file)
 
 #include <linux/linkage.h>
 
+#include <asm/alternative.h>
 #include <asm/assembler.h>
 #include <asm/cache.h>
 #include <asm/cpufeature.h>
 #include <asm/sysreg.h>
-#include <asm/uaccess.h>
 
 /*
  * Copy from user space to user space (alignment handled by the hardware)
 
 end    .req    x5
 ENTRY(__copy_in_user)
-       uaccess_enable_not_uao x3, x4
+ALTERNATIVE("nop", __stringify(SET_PSTATE_PAN(0)), ARM64_ALT_PAN_NOT_UAO, \
+           CONFIG_ARM64_PAN)
        add     end, x0, x2
 #include "copy_template.S"
-       uaccess_disable_not_uao x3
+ALTERNATIVE("nop", __stringify(SET_PSTATE_PAN(1)), ARM64_ALT_PAN_NOT_UAO, \
+           CONFIG_ARM64_PAN)
        mov     x0, #0
        ret
 ENDPROC(__copy_in_user)
index 8077e4f34d56b7f5b2b9a1ed9c5b53f23f573547..7a7efe25503452bdfe8e4108b5f4aa0ad9495da5 100644 (file)
 
 #include <linux/linkage.h>
 
+#include <asm/alternative.h>
 #include <asm/assembler.h>
 #include <asm/cache.h>
 #include <asm/cpufeature.h>
 #include <asm/sysreg.h>
-#include <asm/uaccess.h>
 
 /*
  * Copy to user space from a kernel buffer (alignment handled by the hardware)
 
 end    .req    x5
 ENTRY(__arch_copy_to_user)
-       uaccess_enable_not_uao x3, x4
+ALTERNATIVE("nop", __stringify(SET_PSTATE_PAN(0)), ARM64_ALT_PAN_NOT_UAO, \
+           CONFIG_ARM64_PAN)
        add     end, x0, x2
 #include "copy_template.S"
-       uaccess_disable_not_uao x3
+ALTERNATIVE("nop", __stringify(SET_PSTATE_PAN(1)), ARM64_ALT_PAN_NOT_UAO, \
+           CONFIG_ARM64_PAN)
        mov     x0, #0
        ret
 ENDPROC(__arch_copy_to_user)