Refactor the futex atomic operations into LL/SC helpers which clear
PSTATE.PAN around the user access, in preparation for applying
FEAT_LSUI to them.

Signed-off-by: Yeoreum Yun
Reviewed-by: Catalin Marinas
---
 arch/arm64/include/asm/futex.h | 137 +++++++++++++++++++++------------
 1 file changed, 87 insertions(+), 50 deletions(-)

diff --git a/arch/arm64/include/asm/futex.h b/arch/arm64/include/asm/futex.h
index bc06691d2062..9a0efed50743 100644
--- a/arch/arm64/include/asm/futex.h
+++ b/arch/arm64/include/asm/futex.h
@@ -7,21 +7,25 @@
 
 #include <linux/futex.h>
 #include <linux/uaccess.h>
 
+#include
 #include <asm/errno.h>
 
 #define FUTEX_MAX_LOOPS	128 /* What's the largest number you can think of? */
 
-#define __futex_atomic_op(insn, ret, oldval, uaddr, tmp, oparg)	\
-do {									\
+#define LLSC_FUTEX_ATOMIC_OP(op, insn)					\
+static __always_inline int						\
+__llsc_futex_atomic_##op(int oparg, u32 __user *uaddr, int *oval)	\
+{									\
 	unsigned int loops = FUTEX_MAX_LOOPS;				\
+	int ret, oldval, newval;					\
 									\
 	uaccess_enable_privileged();					\
-	asm volatile(							\
+	asm volatile("// __llsc_futex_atomic_" #op "\n"			\
 "	prfm	pstl1strm, %2\n"					\
-"1:	ldxr	%w1, %2\n"						\
+"1:	ldxr	%w[oldval], %2\n"					\
 	insn "\n"							\
-"2:	stlxr	%w0, %w3, %2\n"						\
+"2:	stlxr	%w0, %w[newval], %2\n"					\
 "	cbz	%w0, 3f\n"						\
 "	sub	%w4, %w4, %w0\n"					\
 "	cbnz	%w4, 1b\n"						\
@@ -30,50 +34,109 @@ do {									\
 "	dmb	ish\n"							\
 	_ASM_EXTABLE_UACCESS_ERR(1b, 3b, %w0)				\
 	_ASM_EXTABLE_UACCESS_ERR(2b, 3b, %w0)				\
-	: "=&r" (ret), "=&r" (oldval), "+Q" (*uaddr), "=&r" (tmp),	\
+	: "=&r" (ret), [oldval] "=&r" (oldval), "+Q" (*uaddr),		\
+	  [newval] "=&r" (newval),					\
 	  "+r" (loops)							\
-	: "r" (oparg), "Ir" (-EAGAIN)					\
+	: [oparg] "r" (oparg), "Ir" (-EAGAIN)				\
 	: "memory");							\
 	uaccess_disable_privileged();					\
-} while (0)
+									\
+	if (!ret)							\
+		*oval = oldval;						\
+									\
+	return ret;							\
+}
+
+LLSC_FUTEX_ATOMIC_OP(add, "add %w[newval], %w[oldval], %w[oparg]")
+LLSC_FUTEX_ATOMIC_OP(or, "orr %w[newval], %w[oldval], %w[oparg]")
+LLSC_FUTEX_ATOMIC_OP(and, "and %w[newval], %w[oldval], %w[oparg]")
+LLSC_FUTEX_ATOMIC_OP(eor, "eor %w[newval], %w[oldval], %w[oparg]")
+LLSC_FUTEX_ATOMIC_OP(set, "mov %w[newval], %w[oparg]")
+
+static __always_inline int
+__llsc_futex_cmpxchg(u32 __user *uaddr, u32 oldval, u32 newval, u32 *oval)
+{
+	int ret = 0;
+	unsigned int loops = FUTEX_MAX_LOOPS;
+	u32 val, tmp;
+
+	uaccess_enable_privileged();
+	asm volatile("//__llsc_futex_cmpxchg\n"
+"	prfm	pstl1strm, %2\n"
+"1:	ldxr	%w1, %2\n"
+"	eor	%w3, %w1, %w5\n"
+"	cbnz	%w3, 4f\n"
+"2:	stlxr	%w3, %w6, %2\n"
+"	cbz	%w3, 3f\n"
+"	sub	%w4, %w4, %w3\n"
+"	cbnz	%w4, 1b\n"
+"	mov	%w0, %w7\n"
+"3:\n"
+"	dmb	ish\n"
+"4:\n"
+	_ASM_EXTABLE_UACCESS_ERR(1b, 4b, %w0)
+	_ASM_EXTABLE_UACCESS_ERR(2b, 4b, %w0)
+	: "+r" (ret), "=&r" (val), "+Q" (*uaddr), "=&r" (tmp), "+r" (loops)
+	: "r" (oldval), "r" (newval), "Ir" (-EAGAIN)
+	: "memory");
+	uaccess_disable_privileged();
+
+	if (!ret)
+		*oval = val;
+
+	return ret;
+}
+
+#define FUTEX_ATOMIC_OP(op)						\
+static __always_inline int						\
+__futex_atomic_##op(int oparg, u32 __user *uaddr, int *oval)		\
+{									\
+	return __llsc_futex_atomic_##op(oparg, uaddr, oval);		\
+}
+
+FUTEX_ATOMIC_OP(add)
+FUTEX_ATOMIC_OP(or)
+FUTEX_ATOMIC_OP(and)
+FUTEX_ATOMIC_OP(eor)
+FUTEX_ATOMIC_OP(set)
+
+static __always_inline int
+__futex_cmpxchg(u32 __user *uaddr, u32 oldval, u32 newval, u32 *oval)
+{
+	return __llsc_futex_cmpxchg(uaddr, oldval, newval, oval);
+}
 
 static inline int
 arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *_uaddr)
 {
-	int oldval = 0, ret, tmp;
-	u32 __user *uaddr = __uaccess_mask_ptr(_uaddr);
+	int ret;
+	u32 __user *uaddr;
 
 	if (!access_ok(_uaddr, sizeof(u32)))
 		return -EFAULT;
 
+	uaddr = __uaccess_mask_ptr(_uaddr);
+
 	switch (op) {
 	case FUTEX_OP_SET:
-		__futex_atomic_op("mov	%w3, %w5",
-				  ret, oldval, uaddr, tmp, oparg);
+		ret = __futex_atomic_set(oparg, uaddr, oval);
 		break;
 	case FUTEX_OP_ADD:
-		__futex_atomic_op("add	%w3, %w1, %w5",
-				  ret, oldval, uaddr, tmp, oparg);
+		ret = __futex_atomic_add(oparg, uaddr, oval);
 		break;
 	case FUTEX_OP_OR:
-		__futex_atomic_op("orr	%w3, %w1, %w5",
-				  ret, oldval, uaddr, tmp, oparg);
+		ret = __futex_atomic_or(oparg, uaddr, oval);
 		break;
 	case FUTEX_OP_ANDN:
-		__futex_atomic_op("and	%w3, %w1, %w5",
-				  ret, oldval, uaddr, tmp, ~oparg);
+		ret = __futex_atomic_and(~oparg, uaddr, oval);
 		break;
 	case FUTEX_OP_XOR:
-		__futex_atomic_op("eor	%w3, %w1, %w5",
-				  ret, oldval, uaddr, tmp, oparg);
+		ret = __futex_atomic_eor(oparg, uaddr, oval);
 		break;
 	default:
 		ret = -ENOSYS;
 	}
 
-	if (!ret)
-		*oval = oldval;
-
 	return ret;
 }
 
@@ -81,40 +144,14 @@ static inline int
 futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *_uaddr,
 			      u32 oldval, u32 newval)
 {
-	int ret = 0;
-	unsigned int loops = FUTEX_MAX_LOOPS;
-	u32 val, tmp;
 	u32 __user *uaddr;
 
 	if (!access_ok(_uaddr, sizeof(u32)))
 		return -EFAULT;
 
 	uaddr = __uaccess_mask_ptr(_uaddr);
 
-	uaccess_enable_privileged();
-	asm volatile("// futex_atomic_cmpxchg_inatomic\n"
-"	prfm	pstl1strm, %2\n"
-"1:	ldxr	%w1, %2\n"
-"	sub	%w3, %w1, %w5\n"
-"	cbnz	%w3, 4f\n"
-"2:	stlxr	%w3, %w6, %2\n"
-"	cbz	%w3, 3f\n"
-"	sub	%w4, %w4, %w3\n"
-"	cbnz	%w4, 1b\n"
-"	mov	%w0, %w7\n"
-"3:\n"
-"	dmb	ish\n"
-"4:\n"
-	_ASM_EXTABLE_UACCESS_ERR(1b, 4b, %w0)
-	_ASM_EXTABLE_UACCESS_ERR(2b, 4b, %w0)
-	: "+r" (ret), "=&r" (val), "+Q" (*uaddr), "=&r" (tmp), "+r" (loops)
-	: "r" (oldval), "r" (newval), "Ir" (-EAGAIN)
-	: "memory");
-	uaccess_disable_privileged();
-
-	if (!ret)
-		*uval = val;
-
-	return ret;
+	return __futex_cmpxchg(uaddr, oldval, newval, uval);
 }
 
 #endif /* __ASM_FUTEX_H */
-- 
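For reference, and not part of the change itself: expanding
LLSC_FUTEX_ATOMIC_OP(add, ...) above by hand gives roughly the helper
below. This is a sketch for illustration; the "mov %w0, %w6" and "3:"
lines are presumably the unchanged lines falling between the patch's
first two hunks (the diff does not show them), with %w6 being the
"Ir" (-EAGAIN) input operand.

/*
 * Hand expansion of LLSC_FUTEX_ATOMIC_OP(add, ...) -- illustration only.
 */
static __always_inline int
__llsc_futex_atomic_add(int oparg, u32 __user *uaddr, int *oval)
{
	unsigned int loops = FUTEX_MAX_LOOPS;
	int ret, oldval, newval;

	uaccess_enable_privileged();			/* clear PSTATE.PAN */
	asm volatile("// __llsc_futex_atomic_add\n"
"	prfm	pstl1strm, %2\n"			/* prefetch for store */
"1:	ldxr	%w[oldval], %2\n"			/* load-exclusive futex word */
"	add	%w[newval], %w[oldval], %w[oparg]\n"	/* the 'insn' argument */
"2:	stlxr	%w0, %w[newval], %2\n"			/* store-exclusive; %w0 = 0 on success */
"	cbz	%w0, 3f\n"
"	sub	%w4, %w4, %w0\n"			/* consume one retry */
"	cbnz	%w4, 1b\n"
"	mov	%w0, %w6\n"				/* retries exhausted: -EAGAIN */
"3:\n"
"	dmb	ish\n"
	_ASM_EXTABLE_UACCESS_ERR(1b, 3b, %w0)		/* fault: %w0 = -EFAULT, resume at 3 */
	_ASM_EXTABLE_UACCESS_ERR(2b, 3b, %w0)
	: "=&r" (ret), [oldval] "=&r" (oldval), "+Q" (*uaddr),
	  [newval] "=&r" (newval),
	  "+r" (loops)
	: [oparg] "r" (oparg), "Ir" (-EAGAIN)
	: "memory");
	uaccess_disable_privileged();			/* set PSTATE.PAN again */

	if (!ret)
		*oval = oldval;

	return ret;
}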