
Commit e02718c

KAGA-KOKO authored and ingomolnar committed
x86/futex: Convert to scoped user access
Replace the open coded implementation with the scoped user access guards.

No functional change intended.

Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Ingo Molnar <mingo@kernel.org>
Link: https://patch.msgid.link/20251027083745.799714344@linutronix.de
1 parent e4e28fd commit e02718c

1 file changed: arch/x86/include/asm/futex.h (33 additions & 42 deletions)
@@ -46,38 +46,31 @@ do { \
 } while(0)
 
 static __always_inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
-                                                u32 __user *uaddr)
+                                                       u32 __user *uaddr)
 {
-        if (can_do_masked_user_access())
-                uaddr = masked_user_access_begin(uaddr);
-        else if (!user_access_begin(uaddr, sizeof(u32)))
-                return -EFAULT;
-
-        switch (op) {
-        case FUTEX_OP_SET:
-                unsafe_atomic_op1("xchgl %0, %2", oval, uaddr, oparg, Efault);
-                break;
-        case FUTEX_OP_ADD:
-                unsafe_atomic_op1(LOCK_PREFIX "xaddl %0, %2", oval,
-                                  uaddr, oparg, Efault);
-                break;
-        case FUTEX_OP_OR:
-                unsafe_atomic_op2("orl %4, %3", oval, uaddr, oparg, Efault);
-                break;
-        case FUTEX_OP_ANDN:
-                unsafe_atomic_op2("andl %4, %3", oval, uaddr, ~oparg, Efault);
-                break;
-        case FUTEX_OP_XOR:
-                unsafe_atomic_op2("xorl %4, %3", oval, uaddr, oparg, Efault);
-                break;
-        default:
-                user_access_end();
-                return -ENOSYS;
+        scoped_user_rw_access(uaddr, Efault) {
+                switch (op) {
+                case FUTEX_OP_SET:
+                        unsafe_atomic_op1("xchgl %0, %2", oval, uaddr, oparg, Efault);
+                        break;
+                case FUTEX_OP_ADD:
+                        unsafe_atomic_op1(LOCK_PREFIX "xaddl %0, %2", oval, uaddr, oparg, Efault);
+                        break;
+                case FUTEX_OP_OR:
+                        unsafe_atomic_op2("orl %4, %3", oval, uaddr, oparg, Efault);
+                        break;
+                case FUTEX_OP_ANDN:
+                        unsafe_atomic_op2("andl %4, %3", oval, uaddr, ~oparg, Efault);
+                        break;
+                case FUTEX_OP_XOR:
+                        unsafe_atomic_op2("xorl %4, %3", oval, uaddr, oparg, Efault);
+                        break;
+                default:
+                        return -ENOSYS;
+                }
         }
-        user_access_end();
         return 0;
 Efault:
-        user_access_end();
         return -EFAULT;
 }
 
@@ -86,21 +79,19 @@ static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
 {
         int ret = 0;
 
-        if (can_do_masked_user_access())
-                uaddr = masked_user_access_begin(uaddr);
-        else if (!user_access_begin(uaddr, sizeof(u32)))
-                return -EFAULT;
-        asm volatile("\n"
-                     "1:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"
-                     "2:\n"
-                     _ASM_EXTABLE_TYPE_REG(1b, 2b, EX_TYPE_EFAULT_REG, %0) \
-                     : "+r" (ret), "=a" (oldval), "+m" (*uaddr)
-                     : "r" (newval), "1" (oldval)
-                     : "memory"
-        );
-        user_access_end();
-        *uval = oldval;
+        scoped_user_rw_access(uaddr, Efault) {
+                asm_inline volatile("\n"
+                        "1:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"
+                        "2:\n"
+                        _ASM_EXTABLE_TYPE_REG(1b, 2b, EX_TYPE_EFAULT_REG, %0)
+                        : "+r" (ret), "=a" (oldval), "+m" (*uaddr)
+                        : "r" (newval), "1" (oldval)
+                        : "memory");
+                *uval = oldval;
+        }
         return ret;
+Efault:
+        return -EFAULT;
 }
 
 #endif
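A note on the guard used above: the diff removes every explicit user_access_end() call, including on the Efault and -ENOSYS paths, and the commit states no functional change is intended, so scoped_user_rw_access() must close the user access window on every exit from the block, including the goto taken when an exception table fixup fires inside it. Below is a minimal stand-alone sketch of one way such a scope can be built with a GNU C cleanup attribute; all names here (access_begin, access_end, scoped_access, scope_cleanup) are illustrative stand-ins, not the kernel's actual implementation.

#include <stdbool.h>
#include <stdio.h>

static bool access_begin(void)
{
        puts("begin");          /* stands in for user_access_begin() */
        return true;
}

static void access_end(void)
{
        puts("end");            /* stands in for user_access_end() */
}

/* Cleanup handler: close the window only if it was actually opened. */
static void scope_cleanup(bool *active)
{
        if (*active)
                access_end();
}

/*
 * Illustrative guard: the cleanup attribute on _active makes
 * scope_cleanup() run on every exit from the for-scope, including a
 * goto out of the body, while _once limits the body to one iteration.
 */
#define scoped_access(elbl)                                             \
        for (bool _active __attribute__((__cleanup__(scope_cleanup)))  \
                        = access_begin(), _once = true;                 \
             _once; _once = false)                                      \
                if (!_active)                                           \
                        goto elbl;                                      \
                else

int main(void)
{
        scoped_access(fault) {
                puts("inside the guarded scope");
                /* an extable fixup would effectively 'goto fault;' here */
        }
        return 0;
fault:
        return 1;
}

Compiled with GCC or Clang, the cleanup handler runs whenever _active leaves scope, so a body that jumps to the caller-supplied label still closes the window first. That is the property that lets the converted futex code end with a bare return -EFAULT; at its Efault labels.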
