@@ -210,6 +210,25 @@ do { \
210210 (typeof(_var))(unsigned long) pco_old__; \
211211})
212212
/*
 * this_cpu_try_cmpxchg() core: compare *(_ovalp) against the percpu
 * variable @_var and install @_nval on match.  Evaluates to true on
 * success; on failure the value observed in the variable (left in
 * the accumulator by cmpxchg) is written back through @_ovalp so the
 * caller can retry without re-reading.  ZF reports the outcome
 * (CC_SET(z)/CC_OUT(z)).
 */
#define percpu_try_cmpxchg_op(size, qual, _var, _ovalp, _nval)		\
({									\
	bool success;							\
	__pcpu_type_##size *pco_oval__ = (__pcpu_type_##size *)(_ovalp); \
	__pcpu_type_##size pco_old__ = *pco_oval__;			\
	__pcpu_type_##size pco_new__ = __pcpu_cast_##size(_nval);	\
	asm qual (__pcpu_op2_##size("cmpxchg", "%[nval]",		\
				    __percpu_arg([var]))		\
		  CC_SET(z)						\
		  : CC_OUT(z) (success),				\
		    [oval] "+a" (pco_old__),				\
		    [var] "+m" (_var)					\
		  : [nval] __pcpu_reg_##size(, pco_new__)		\
		  : "memory");						\
	if (unlikely(!success))						\
		*pco_oval__ = pco_old__;				\
	likely(success);						\
})
231+
213232#if defined(CONFIG_X86_32 ) && !defined(CONFIG_UML )
214233#define percpu_cmpxchg64_op (size , qual , _var , _oval , _nval ) \
215234({ \
@@ -223,26 +242,63 @@ do { \
223242 old__.var = _oval; \
224243 new__.var = _nval; \
225244 \
226- asm qual (ALTERNATIVE("leal %P[var], %%esi; call this_cpu_cmpxchg8b_emu", \
245+ asm qual (ALTERNATIVE("call this_cpu_cmpxchg8b_emu", \
227246 "cmpxchg8b " __percpu_arg([var]), X86_FEATURE_CX8) \
228247 : [var] "+m" (_var), \
229248 "+a" (old__.low), \
230249 "+d" (old__.high) \
231250 : "b" (new__.low), \
232- "c" (new__.high) \
233- : "memory", "esi"); \
251+ "c" (new__.high), \
252+ "S" (&(_var)) \
253+ : "memory"); \
234254 \
235255 old__.var; \
236256})
237257
/* 32-bit: 8 byte percpu cmpxchg routed through the cmpxchg8b helper. */
#define raw_cpu_cmpxchg64(pcp, oval, nval)	percpu_cmpxchg64_op(8, , pcp, oval, nval)
#define this_cpu_cmpxchg64(pcp, oval, nval)	percpu_cmpxchg64_op(8, volatile, pcp, oval, nval)
260+
/*
 * try_cmpxchg for 8 byte percpu values on 32-bit: cmpxchg8b when the
 * CPU has it (X86_FEATURE_CX8), otherwise a call to the emulation
 * helper, which receives the variable's address via the "S" (%esi)
 * input.  On failure the observed value is stored back through
 * @_ovalp; evaluates to true on success (ZF from cmpxchg8b).
 */
#define percpu_try_cmpxchg64_op(size, qual, _var, _ovalp, _nval)	\
({									\
	bool success;							\
	u64 *_oval = (u64 *)(_ovalp);					\
	union {								\
		u64 var;						\
		struct {						\
			u32 low, high;					\
		};							\
	} old__, new__;							\
									\
	old__.var = *_oval;						\
	new__.var = _nval;						\
									\
	asm qual (ALTERNATIVE("call this_cpu_cmpxchg8b_emu",		\
			      "cmpxchg8b " __percpu_arg([var]), X86_FEATURE_CX8) \
		  CC_SET(z)						\
		  : CC_OUT(z) (success),				\
		    [var] "+m" (_var),					\
		    "+a" (old__.low),					\
		    "+d" (old__.high)					\
		  : "b" (new__.low),					\
		    "c" (new__.high),					\
		    "S" (&(_var))					\
		  : "memory");						\
	if (unlikely(!success))						\
		*_oval = old__.var;					\
	likely(success);						\
})
290+
/* 32-bit try_cmpxchg wrappers for 8 byte percpu values. */
#define raw_cpu_try_cmpxchg64(pcp, ovalp, nval)	percpu_try_cmpxchg64_op(8, , pcp, ovalp, nval)
#define this_cpu_try_cmpxchg64(pcp, ovalp, nval)	percpu_try_cmpxchg64_op(8, volatile, pcp, ovalp, nval)
240293#endif
241294
242295#ifdef CONFIG_X86_64
/*
 * 64-bit: plain cmpxchg handles 8 byte percpu values directly.
 * No trailing ';' in the expansions -- these statement-expression
 * macros must remain usable inside expressions and conditions.
 */
#define raw_cpu_cmpxchg64(pcp, oval, nval)	percpu_cmpxchg_op(8, , pcp, oval, nval)
#define this_cpu_cmpxchg64(pcp, oval, nval)	percpu_cmpxchg_op(8, volatile, pcp, oval, nval)

#define raw_cpu_try_cmpxchg64(pcp, ovalp, nval)	percpu_try_cmpxchg_op(8, , pcp, ovalp, nval)
#define this_cpu_try_cmpxchg64(pcp, ovalp, nval)	percpu_try_cmpxchg_op(8, volatile, pcp, ovalp, nval)
301+
246302#define percpu_cmpxchg128_op (size , qual , _var , _oval , _nval ) \
247303({ \
248304 union { \
@@ -255,20 +311,54 @@ do { \
255311 old__.var = _oval; \
256312 new__.var = _nval; \
257313 \
258- asm qual (ALTERNATIVE("leaq %P[var], %%rsi; call this_cpu_cmpxchg16b_emu", \
314+ asm qual (ALTERNATIVE("call this_cpu_cmpxchg16b_emu", \
259315 "cmpxchg16b " __percpu_arg([var]), X86_FEATURE_CX16) \
260316 : [var] "+m" (_var), \
261317 "+a" (old__.low), \
262318 "+d" (old__.high) \
263319 : "b" (new__.low), \
264- "c" (new__.high) \
265- : "memory", "rsi"); \
320+ "c" (new__.high), \
321+ "S" (&(_var)) \
322+ : "memory"); \
266323 \
267324 old__.var; \
268325})
269326
/* 64-bit: 16 byte percpu cmpxchg routed through the cmpxchg16b helper. */
#define raw_cpu_cmpxchg128(pcp, oval, nval)	percpu_cmpxchg128_op(16, , pcp, oval, nval)
#define this_cpu_cmpxchg128(pcp, oval, nval)	percpu_cmpxchg128_op(16, volatile, pcp, oval, nval)
329+
/*
 * try_cmpxchg for 16 byte percpu values on 64-bit: cmpxchg16b when the
 * CPU has it (X86_FEATURE_CX16), otherwise a call to the emulation
 * helper, which receives the variable's address via the "S" (%rsi)
 * input.  On failure the observed value is stored back through
 * @_ovalp; evaluates to true on success (ZF from cmpxchg16b).
 */
#define percpu_try_cmpxchg128_op(size, qual, _var, _ovalp, _nval)	\
({									\
	bool success;							\
	u128 *_oval = (u128 *)(_ovalp);					\
	union {								\
		u128 var;						\
		struct {						\
			u64 low, high;					\
		};							\
	} old__, new__;							\
									\
	old__.var = *_oval;						\
	new__.var = _nval;						\
									\
	asm qual (ALTERNATIVE("call this_cpu_cmpxchg16b_emu",		\
			      "cmpxchg16b " __percpu_arg([var]), X86_FEATURE_CX16) \
		  CC_SET(z)						\
		  : CC_OUT(z) (success),				\
		    [var] "+m" (_var),					\
		    "+a" (old__.low),					\
		    "+d" (old__.high)					\
		  : "b" (new__.low),					\
		    "c" (new__.high),					\
		    "S" (&(_var))					\
		  : "memory");						\
	if (unlikely(!success))						\
		*_oval = old__.var;					\
	likely(success);						\
})
359+
/* 64-bit try_cmpxchg wrappers for 16 byte percpu values. */
#define raw_cpu_try_cmpxchg128(pcp, ovalp, nval)	percpu_try_cmpxchg128_op(16, , pcp, ovalp, nval)
#define this_cpu_try_cmpxchg128(pcp, ovalp, nval)	percpu_try_cmpxchg128_op(16, volatile, pcp, ovalp, nval)
272362#endif
273363
274364/*
@@ -343,13 +433,19 @@ do { \
/* Sized raw (non-IRQ-safe) cmpxchg/try_cmpxchg wrappers, 1/2/4 bytes. */
#define raw_cpu_cmpxchg_1(pcp, oval, nval)	percpu_cmpxchg_op(1, , pcp, oval, nval)
#define raw_cpu_cmpxchg_2(pcp, oval, nval)	percpu_cmpxchg_op(2, , pcp, oval, nval)
#define raw_cpu_cmpxchg_4(pcp, oval, nval)	percpu_cmpxchg_op(4, , pcp, oval, nval)
#define raw_cpu_try_cmpxchg_1(pcp, ovalp, nval)	percpu_try_cmpxchg_op(1, , pcp, ovalp, nval)
#define raw_cpu_try_cmpxchg_2(pcp, ovalp, nval)	percpu_try_cmpxchg_op(2, , pcp, ovalp, nval)
#define raw_cpu_try_cmpxchg_4(pcp, ovalp, nval)	percpu_try_cmpxchg_op(4, , pcp, ovalp, nval)
346439
/* Sized IRQ-safe (volatile asm) add_return/cmpxchg/try_cmpxchg wrappers. */
#define this_cpu_add_return_1(pcp, val)		percpu_add_return_op(1, volatile, pcp, val)
#define this_cpu_add_return_2(pcp, val)		percpu_add_return_op(2, volatile, pcp, val)
#define this_cpu_add_return_4(pcp, val)		percpu_add_return_op(4, volatile, pcp, val)
#define this_cpu_cmpxchg_1(pcp, oval, nval)	percpu_cmpxchg_op(1, volatile, pcp, oval, nval)
#define this_cpu_cmpxchg_2(pcp, oval, nval)	percpu_cmpxchg_op(2, volatile, pcp, oval, nval)
#define this_cpu_cmpxchg_4(pcp, oval, nval)	percpu_cmpxchg_op(4, volatile, pcp, oval, nval)
#define this_cpu_try_cmpxchg_1(pcp, ovalp, nval)	percpu_try_cmpxchg_op(1, volatile, pcp, ovalp, nval)
#define this_cpu_try_cmpxchg_2(pcp, ovalp, nval)	percpu_try_cmpxchg_op(2, volatile, pcp, ovalp, nval)
#define this_cpu_try_cmpxchg_4(pcp, ovalp, nval)	percpu_try_cmpxchg_op(4, volatile, pcp, ovalp, nval)
353449
354450/*
355451 * Per cpu atomic 64 bit operations are only available under 64 bit.
@@ -364,6 +460,7 @@ do { \
/* 8 byte raw (non-IRQ-safe) percpu ops; 64-bit only. */
#define raw_cpu_add_return_8(pcp, val)		percpu_add_return_op(8, , pcp, val)
#define raw_cpu_xchg_8(pcp, nval)		raw_percpu_xchg_op(pcp, nval)
#define raw_cpu_cmpxchg_8(pcp, oval, nval)	percpu_cmpxchg_op(8, , pcp, oval, nval)
#define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval)	percpu_try_cmpxchg_op(8, , pcp, ovalp, nval)
367464
/* 8 byte IRQ-safe percpu load/store; 64-bit only. */
#define this_cpu_read_8(pcp)			percpu_from_op(8, volatile, "mov", pcp)
#define this_cpu_write_8(pcp, val)		percpu_to_op(8, volatile, "mov", (pcp), val)
@@ -373,6 +470,7 @@ do { \
/* 8 byte IRQ-safe (volatile asm) percpu RMW ops; 64-bit only. */
#define this_cpu_add_return_8(pcp, val)		percpu_add_return_op(8, volatile, pcp, val)
#define this_cpu_xchg_8(pcp, nval)		percpu_xchg_op(8, volatile, pcp, nval)
#define this_cpu_cmpxchg_8(pcp, oval, nval)	percpu_cmpxchg_op(8, volatile, pcp, oval, nval)
#define this_cpu_try_cmpxchg_8(pcp, ovalp, nval)	percpu_try_cmpxchg_op(8, volatile, pcp, ovalp, nval)
376474#endif
377475
378476static __always_inline bool x86_this_cpu_constant_test_bit (unsigned int nr ,
0 commit comments