@@ -210,6 +210,25 @@ do { \
210210 (typeof(_var))(unsigned long) pco_old__; \
211211})
212212
/*
 * percpu_try_cmpxchg_op - try-cmpxchg on a per-CPU variable.
 *
 * Compares the per-CPU variable @_var against *@_ovalp; if they are equal,
 * stores @_nval into @_var.  On failure the current value of @_var is
 * written back through @_ovalp, so the caller can retry without a separate
 * reload.  Evaluates to true on success, false on failure.
 *
 * Note: there must be no space between the macro name and the parameter
 * list — "name (args)" defines an object-like macro whose replacement
 * text is the parenthesized list, not a function-like macro.
 */
#define percpu_try_cmpxchg_op(size, qual, _var, _ovalp, _nval)		\
({									\
	bool success;							\
	__pcpu_type_##size *pco_oval__ = (__pcpu_type_##size *)(_ovalp); \
	__pcpu_type_##size pco_old__ = *pco_oval__;			\
	__pcpu_type_##size pco_new__ = __pcpu_cast_##size(_nval);	\
	asm qual (__pcpu_op2_##size("cmpxchg", "%[nval]",		\
				    __percpu_arg([var]))		\
		  CC_SET(z)						\
		  : CC_OUT(z) (success),				\
		    [oval] "+a" (pco_old__),				\
		    [var] "+m" (_var)					\
		  : [nval] __pcpu_reg_##size(, pco_new__)		\
		  : "memory");						\
	if (unlikely(!success))						\
		*pco_oval__ = pco_old__;				\
	likely(success);						\
})
231+
213232#if defined(CONFIG_X86_32 ) && !defined(CONFIG_UML )
214233#define percpu_cmpxchg64_op (size , qual , _var , _oval , _nval ) \
215234({ \
@@ -410,13 +429,19 @@ do { \
/* Non-preemption-safe (raw) 1/2/4-byte per-CPU cmpxchg / try_cmpxchg. */
#define raw_cpu_cmpxchg_1(pcp, oval, nval)	percpu_cmpxchg_op(1, , pcp, oval, nval)
#define raw_cpu_cmpxchg_2(pcp, oval, nval)	percpu_cmpxchg_op(2, , pcp, oval, nval)
#define raw_cpu_cmpxchg_4(pcp, oval, nval)	percpu_cmpxchg_op(4, , pcp, oval, nval)
#define raw_cpu_try_cmpxchg_1(pcp, ovalp, nval)	percpu_try_cmpxchg_op(1, , pcp, ovalp, nval)
#define raw_cpu_try_cmpxchg_2(pcp, ovalp, nval)	percpu_try_cmpxchg_op(2, , pcp, ovalp, nval)
#define raw_cpu_try_cmpxchg_4(pcp, ovalp, nval)	percpu_try_cmpxchg_op(4, , pcp, ovalp, nval)
413435
/* Preemption-safe (this_cpu) 1/2/4-byte per-CPU RMW operations. */
#define this_cpu_add_return_1(pcp, val)		percpu_add_return_op(1, volatile, pcp, val)
#define this_cpu_add_return_2(pcp, val)		percpu_add_return_op(2, volatile, pcp, val)
#define this_cpu_add_return_4(pcp, val)		percpu_add_return_op(4, volatile, pcp, val)
#define this_cpu_cmpxchg_1(pcp, oval, nval)	percpu_cmpxchg_op(1, volatile, pcp, oval, nval)
#define this_cpu_cmpxchg_2(pcp, oval, nval)	percpu_cmpxchg_op(2, volatile, pcp, oval, nval)
#define this_cpu_cmpxchg_4(pcp, oval, nval)	percpu_cmpxchg_op(4, volatile, pcp, oval, nval)
#define this_cpu_try_cmpxchg_1(pcp, ovalp, nval)	percpu_try_cmpxchg_op(1, volatile, pcp, ovalp, nval)
#define this_cpu_try_cmpxchg_2(pcp, ovalp, nval)	percpu_try_cmpxchg_op(2, volatile, pcp, ovalp, nval)
#define this_cpu_try_cmpxchg_4(pcp, ovalp, nval)	percpu_try_cmpxchg_op(4, volatile, pcp, ovalp, nval)
420445
421446/*
422447 * Per cpu atomic 64 bit operations are only available under 64 bit.
@@ -431,6 +456,7 @@ do { \
/* Non-preemption-safe (raw) 64-bit per-CPU RMW operations (64-bit only). */
#define raw_cpu_add_return_8(pcp, val)		percpu_add_return_op(8, , pcp, val)
#define raw_cpu_xchg_8(pcp, nval)		raw_percpu_xchg_op(pcp, nval)
#define raw_cpu_cmpxchg_8(pcp, oval, nval)	percpu_cmpxchg_op(8, , pcp, oval, nval)
#define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval)	percpu_try_cmpxchg_op(8, , pcp, ovalp, nval)
434460
/* Preemption-safe 64-bit per-CPU load/store via a single MOV. */
#define this_cpu_read_8(pcp)			percpu_from_op(8, volatile, "mov", pcp)
#define this_cpu_write_8(pcp, val)		percpu_to_op(8, volatile, "mov", (pcp), val)
@@ -440,6 +466,7 @@ do { \
440466#define this_cpu_add_return_8 (pcp , val ) percpu_add_return_op(8, volatile, pcp, val)
441467#define this_cpu_xchg_8 (pcp , nval ) percpu_xchg_op(8, volatile, pcp, nval)
442468#define this_cpu_cmpxchg_8 (pcp , oval , nval ) percpu_cmpxchg_op(8, volatile, pcp, oval, nval)
469+ #define this_cpu_try_cmpxchg_8 (pcp , ovalp , nval ) percpu_try_cmpxchg_op(8, volatile, pcp, ovalp, nval)
443470#endif
444471
445472static __always_inline bool x86_this_cpu_constant_test_bit (unsigned int nr ,
0 commit comments