|
147 | 147 |
|
148 | 148 | #endif /* arch_try_cmpxchg_relaxed */ |
149 | 149 |
|
/*
 * Fallbacks for the 64-bit try_cmpxchg family.
 *
 * Two cases, selected on whether the architecture supplies
 * arch_try_cmpxchg64_relaxed:
 *  - if it does NOT (#ifndef branch): alias every ordering variant to an
 *    arch-provided arch_try_cmpxchg64 when one exists, and otherwise build
 *    each variant generically from the matching arch_cmpxchg64* primitive;
 *  - if it DOES (#else branch below): derive the acquire/release/fully-ordered
 *    forms from the relaxed one via the __atomic_op_acquire/_release/_fence
 *    wrappers.
 *
 * NOTE(review): this file appears to be auto-generated (see the checksum
 * comment at the end of the file) — presumably changes belong in the
 * generator, not here; confirm before hand-editing.
 */
| 150 | +#ifndef arch_try_cmpxchg64_relaxed |
| 151 | +#ifdef arch_try_cmpxchg64 |
| 152 | +#define arch_try_cmpxchg64_acquire arch_try_cmpxchg64 |
| 153 | +#define arch_try_cmpxchg64_release arch_try_cmpxchg64 |
| 154 | +#define arch_try_cmpxchg64_relaxed arch_try_cmpxchg64 |
| 155 | +#endif /* arch_try_cmpxchg64 */ |
| 156 | + |
/*
 * Generic try_cmpxchg built on cmpxchg: attempt to swap *(_ptr) from *(_oldp)
 * to (_new); on failure, write the value actually observed back through
 * (_oldp) so the caller can retry, and evaluate to true only on success.
 * Uses a GNU statement expression; (_oldp) is evaluated once into ___op.
 */
| 157 | +#ifndef arch_try_cmpxchg64 |
| 158 | +#define arch_try_cmpxchg64(_ptr, _oldp, _new) \ |
| 159 | +({ \ |
| 160 | +	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ |
| 161 | +	___r = arch_cmpxchg64((_ptr), ___o, (_new)); \ |
| 162 | +	if (unlikely(___r != ___o)) \ |
| 163 | +		*___op = ___r; \ |
| 164 | +	likely(___r == ___o); \ |
| 165 | +}) |
| 166 | +#endif /* arch_try_cmpxchg64 */ |
| 167 | + |
/* Same pattern, with acquire ordering taken from arch_cmpxchg64_acquire. */
| 168 | +#ifndef arch_try_cmpxchg64_acquire |
| 169 | +#define arch_try_cmpxchg64_acquire(_ptr, _oldp, _new) \ |
| 170 | +({ \ |
| 171 | +	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ |
| 172 | +	___r = arch_cmpxchg64_acquire((_ptr), ___o, (_new)); \ |
| 173 | +	if (unlikely(___r != ___o)) \ |
| 174 | +		*___op = ___r; \ |
| 175 | +	likely(___r == ___o); \ |
| 176 | +}) |
| 177 | +#endif /* arch_try_cmpxchg64_acquire */ |
| 178 | + |
/* Same pattern, with release ordering taken from arch_cmpxchg64_release. */
| 179 | +#ifndef arch_try_cmpxchg64_release |
| 180 | +#define arch_try_cmpxchg64_release(_ptr, _oldp, _new) \ |
| 181 | +({ \ |
| 182 | +	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ |
| 183 | +	___r = arch_cmpxchg64_release((_ptr), ___o, (_new)); \ |
| 184 | +	if (unlikely(___r != ___o)) \ |
| 185 | +		*___op = ___r; \ |
| 186 | +	likely(___r == ___o); \ |
| 187 | +}) |
| 188 | +#endif /* arch_try_cmpxchg64_release */ |
| 189 | + |
/* Same pattern, with no ordering, taken from arch_cmpxchg64_relaxed. */
| 190 | +#ifndef arch_try_cmpxchg64_relaxed |
| 191 | +#define arch_try_cmpxchg64_relaxed(_ptr, _oldp, _new) \ |
| 192 | +({ \ |
| 193 | +	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ |
| 194 | +	___r = arch_cmpxchg64_relaxed((_ptr), ___o, (_new)); \ |
| 195 | +	if (unlikely(___r != ___o)) \ |
| 196 | +		*___op = ___r; \ |
| 197 | +	likely(___r == ___o); \ |
| 198 | +}) |
| 199 | +#endif /* arch_try_cmpxchg64_relaxed */ |
| 200 | + |
/*
 * The architecture provided arch_try_cmpxchg64_relaxed: synthesize the
 * stronger orderings from it unless the arch also provided them directly.
 * __atomic_op_acquire/_release add the respective barrier around the relaxed
 * op; __atomic_op_fence makes the fully-ordered form.
 */
| 201 | +#else /* arch_try_cmpxchg64_relaxed */ |
| 202 | + |
| 203 | +#ifndef arch_try_cmpxchg64_acquire |
| 204 | +#define arch_try_cmpxchg64_acquire(...) \ |
| 205 | +	__atomic_op_acquire(arch_try_cmpxchg64, __VA_ARGS__) |
| 206 | +#endif |
| 207 | + |
| 208 | +#ifndef arch_try_cmpxchg64_release |
| 209 | +#define arch_try_cmpxchg64_release(...) \ |
| 210 | +	__atomic_op_release(arch_try_cmpxchg64, __VA_ARGS__) |
| 211 | +#endif |
| 212 | + |
| 213 | +#ifndef arch_try_cmpxchg64 |
| 214 | +#define arch_try_cmpxchg64(...) \ |
| 215 | +	__atomic_op_fence(arch_try_cmpxchg64, __VA_ARGS__) |
| 216 | +#endif |
| 217 | + |
| 218 | +#endif /* arch_try_cmpxchg64_relaxed */ |
| 219 | + |
150 | 220 | #ifndef arch_atomic_read_acquire |
151 | 221 | static __always_inline int |
152 | 222 | arch_atomic_read_acquire(const atomic_t *v) |
@@ -2386,4 +2456,4 @@ arch_atomic64_dec_if_positive(atomic64_t *v) |
2386 | 2456 | #endif |
2387 | 2457 |
|
2388 | 2458 | #endif /* _LINUX_ATOMIC_FALLBACK_H */ |
2389 | | -// 8e2cc06bc0d2c0967d2f8424762bd48555ee40ae |
| 2459 | +// b5e87bdd5ede61470c29f7a7e4de781af3770f09 |
0 commit comments