Skip to content

Commit e5ab9ef

Browse files
KAGA-KOKO authored and Peter Zijlstra committed
atomics: Provide atomic_add_negative() variants
atomic_add_negative() does not provide the relaxed/acquire/release variants.

Provide them in preparation for a new scalable reference count algorithm.

Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Acked-by: Mark Rutland <mark.rutland@arm.com>
Link: https://lore.kernel.org/r/20230323102800.101763813@linutronix.de
1 parent fe15c26 commit e5ab9ef

5 files changed

Lines changed: 309 additions & 18 deletions

File tree

include/linux/atomic/atomic-arch-fallback.h

Lines changed: 199 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1208,15 +1208,21 @@ arch_atomic_inc_and_test(atomic_t *v)
12081208
#define arch_atomic_inc_and_test arch_atomic_inc_and_test
12091209
#endif
12101210

1211+
#ifndef arch_atomic_add_negative_relaxed
1212+
#ifdef arch_atomic_add_negative
1213+
#define arch_atomic_add_negative_acquire arch_atomic_add_negative
1214+
#define arch_atomic_add_negative_release arch_atomic_add_negative
1215+
#define arch_atomic_add_negative_relaxed arch_atomic_add_negative
1216+
#endif /* arch_atomic_add_negative */
1217+
12111218
#ifndef arch_atomic_add_negative
12121219
/**
1213-
* arch_atomic_add_negative - add and test if negative
1220+
* arch_atomic_add_negative - Add and test if negative
12141221
* @i: integer value to add
12151222
* @v: pointer of type atomic_t
12161223
*
1217-
* Atomically adds @i to @v and returns true
1218-
* if the result is negative, or false when
1219-
* result is greater than or equal to zero.
1224+
* Atomically adds @i to @v and returns true if the result is negative,
1225+
* or false when the result is greater than or equal to zero.
12201226
*/
12211227
static __always_inline bool
12221228
arch_atomic_add_negative(int i, atomic_t *v)
@@ -1226,6 +1232,95 @@ arch_atomic_add_negative(int i, atomic_t *v)
12261232
#define arch_atomic_add_negative arch_atomic_add_negative
12271233
#endif
12281234

1235+
#ifndef arch_atomic_add_negative_acquire
1236+
/**
1237+
* arch_atomic_add_negative_acquire - Add and test if negative
1238+
* @i: integer value to add
1239+
* @v: pointer of type atomic_t
1240+
*
1241+
* Atomically adds @i to @v and returns true if the result is negative,
1242+
* or false when the result is greater than or equal to zero.
1243+
*/
1244+
static __always_inline bool
1245+
arch_atomic_add_negative_acquire(int i, atomic_t *v)
1246+
{
1247+
return arch_atomic_add_return_acquire(i, v) < 0;
1248+
}
1249+
#define arch_atomic_add_negative_acquire arch_atomic_add_negative_acquire
1250+
#endif
1251+
1252+
#ifndef arch_atomic_add_negative_release
1253+
/**
1254+
* arch_atomic_add_negative_release - Add and test if negative
1255+
* @i: integer value to add
1256+
* @v: pointer of type atomic_t
1257+
*
1258+
* Atomically adds @i to @v and returns true if the result is negative,
1259+
* or false when the result is greater than or equal to zero.
1260+
*/
1261+
static __always_inline bool
1262+
arch_atomic_add_negative_release(int i, atomic_t *v)
1263+
{
1264+
return arch_atomic_add_return_release(i, v) < 0;
1265+
}
1266+
#define arch_atomic_add_negative_release arch_atomic_add_negative_release
1267+
#endif
1268+
1269+
#ifndef arch_atomic_add_negative_relaxed
1270+
/**
1271+
* arch_atomic_add_negative_relaxed - Add and test if negative
1272+
* @i: integer value to add
1273+
* @v: pointer of type atomic_t
1274+
*
1275+
* Atomically adds @i to @v and returns true if the result is negative,
1276+
* or false when the result is greater than or equal to zero.
1277+
*/
1278+
static __always_inline bool
1279+
arch_atomic_add_negative_relaxed(int i, atomic_t *v)
1280+
{
1281+
return arch_atomic_add_return_relaxed(i, v) < 0;
1282+
}
1283+
#define arch_atomic_add_negative_relaxed arch_atomic_add_negative_relaxed
1284+
#endif
1285+
1286+
#else /* arch_atomic_add_negative_relaxed */
1287+
1288+
#ifndef arch_atomic_add_negative_acquire
1289+
static __always_inline bool
1290+
arch_atomic_add_negative_acquire(int i, atomic_t *v)
1291+
{
1292+
bool ret = arch_atomic_add_negative_relaxed(i, v);
1293+
__atomic_acquire_fence();
1294+
return ret;
1295+
}
1296+
#define arch_atomic_add_negative_acquire arch_atomic_add_negative_acquire
1297+
#endif
1298+
1299+
#ifndef arch_atomic_add_negative_release
1300+
static __always_inline bool
1301+
arch_atomic_add_negative_release(int i, atomic_t *v)
1302+
{
1303+
__atomic_release_fence();
1304+
return arch_atomic_add_negative_relaxed(i, v);
1305+
}
1306+
#define arch_atomic_add_negative_release arch_atomic_add_negative_release
1307+
#endif
1308+
1309+
#ifndef arch_atomic_add_negative
1310+
static __always_inline bool
1311+
arch_atomic_add_negative(int i, atomic_t *v)
1312+
{
1313+
bool ret;
1314+
__atomic_pre_full_fence();
1315+
ret = arch_atomic_add_negative_relaxed(i, v);
1316+
__atomic_post_full_fence();
1317+
return ret;
1318+
}
1319+
#define arch_atomic_add_negative arch_atomic_add_negative
1320+
#endif
1321+
1322+
#endif /* arch_atomic_add_negative_relaxed */
1323+
12291324
#ifndef arch_atomic_fetch_add_unless
12301325
/**
12311326
* arch_atomic_fetch_add_unless - add unless the number is already a given value
@@ -2329,15 +2424,21 @@ arch_atomic64_inc_and_test(atomic64_t *v)
23292424
#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test
23302425
#endif
23312426

2427+
#ifndef arch_atomic64_add_negative_relaxed
2428+
#ifdef arch_atomic64_add_negative
2429+
#define arch_atomic64_add_negative_acquire arch_atomic64_add_negative
2430+
#define arch_atomic64_add_negative_release arch_atomic64_add_negative
2431+
#define arch_atomic64_add_negative_relaxed arch_atomic64_add_negative
2432+
#endif /* arch_atomic64_add_negative */
2433+
23322434
#ifndef arch_atomic64_add_negative
23332435
/**
2334-
* arch_atomic64_add_negative - add and test if negative
2436+
* arch_atomic64_add_negative - Add and test if negative
23352437
* @i: integer value to add
23362438
* @v: pointer of type atomic64_t
23372439
*
2338-
* Atomically adds @i to @v and returns true
2339-
* if the result is negative, or false when
2340-
* result is greater than or equal to zero.
2440+
* Atomically adds @i to @v and returns true if the result is negative,
2441+
* or false when the result is greater than or equal to zero.
23412442
*/
23422443
static __always_inline bool
23432444
arch_atomic64_add_negative(s64 i, atomic64_t *v)
@@ -2347,6 +2448,95 @@ arch_atomic64_add_negative(s64 i, atomic64_t *v)
23472448
#define arch_atomic64_add_negative arch_atomic64_add_negative
23482449
#endif
23492450

2451+
#ifndef arch_atomic64_add_negative_acquire
2452+
/**
2453+
* arch_atomic64_add_negative_acquire - Add and test if negative
2454+
* @i: integer value to add
2455+
* @v: pointer of type atomic64_t
2456+
*
2457+
* Atomically adds @i to @v and returns true if the result is negative,
2458+
* or false when the result is greater than or equal to zero.
2459+
*/
2460+
static __always_inline bool
2461+
arch_atomic64_add_negative_acquire(s64 i, atomic64_t *v)
2462+
{
2463+
return arch_atomic64_add_return_acquire(i, v) < 0;
2464+
}
2465+
#define arch_atomic64_add_negative_acquire arch_atomic64_add_negative_acquire
2466+
#endif
2467+
2468+
#ifndef arch_atomic64_add_negative_release
2469+
/**
2470+
* arch_atomic64_add_negative_release - Add and test if negative
2471+
* @i: integer value to add
2472+
* @v: pointer of type atomic64_t
2473+
*
2474+
* Atomically adds @i to @v and returns true if the result is negative,
2475+
* or false when the result is greater than or equal to zero.
2476+
*/
2477+
static __always_inline bool
2478+
arch_atomic64_add_negative_release(s64 i, atomic64_t *v)
2479+
{
2480+
return arch_atomic64_add_return_release(i, v) < 0;
2481+
}
2482+
#define arch_atomic64_add_negative_release arch_atomic64_add_negative_release
2483+
#endif
2484+
2485+
#ifndef arch_atomic64_add_negative_relaxed
2486+
/**
2487+
* arch_atomic64_add_negative_relaxed - Add and test if negative
2488+
* @i: integer value to add
2489+
* @v: pointer of type atomic64_t
2490+
*
2491+
* Atomically adds @i to @v and returns true if the result is negative,
2492+
* or false when the result is greater than or equal to zero.
2493+
*/
2494+
static __always_inline bool
2495+
arch_atomic64_add_negative_relaxed(s64 i, atomic64_t *v)
2496+
{
2497+
return arch_atomic64_add_return_relaxed(i, v) < 0;
2498+
}
2499+
#define arch_atomic64_add_negative_relaxed arch_atomic64_add_negative_relaxed
2500+
#endif
2501+
2502+
#else /* arch_atomic64_add_negative_relaxed */
2503+
2504+
#ifndef arch_atomic64_add_negative_acquire
2505+
static __always_inline bool
2506+
arch_atomic64_add_negative_acquire(s64 i, atomic64_t *v)
2507+
{
2508+
bool ret = arch_atomic64_add_negative_relaxed(i, v);
2509+
__atomic_acquire_fence();
2510+
return ret;
2511+
}
2512+
#define arch_atomic64_add_negative_acquire arch_atomic64_add_negative_acquire
2513+
#endif
2514+
2515+
#ifndef arch_atomic64_add_negative_release
2516+
static __always_inline bool
2517+
arch_atomic64_add_negative_release(s64 i, atomic64_t *v)
2518+
{
2519+
__atomic_release_fence();
2520+
return arch_atomic64_add_negative_relaxed(i, v);
2521+
}
2522+
#define arch_atomic64_add_negative_release arch_atomic64_add_negative_release
2523+
#endif
2524+
2525+
#ifndef arch_atomic64_add_negative
2526+
static __always_inline bool
2527+
arch_atomic64_add_negative(s64 i, atomic64_t *v)
2528+
{
2529+
bool ret;
2530+
__atomic_pre_full_fence();
2531+
ret = arch_atomic64_add_negative_relaxed(i, v);
2532+
__atomic_post_full_fence();
2533+
return ret;
2534+
}
2535+
#define arch_atomic64_add_negative arch_atomic64_add_negative
2536+
#endif
2537+
2538+
#endif /* arch_atomic64_add_negative_relaxed */
2539+
23502540
#ifndef arch_atomic64_fetch_add_unless
23512541
/**
23522542
* arch_atomic64_fetch_add_unless - add unless the number is already a given value
@@ -2456,4 +2646,4 @@ arch_atomic64_dec_if_positive(atomic64_t *v)
24562646
#endif
24572647

24582648
#endif /* _LINUX_ATOMIC_FALLBACK_H */
2459-
// b5e87bdd5ede61470c29f7a7e4de781af3770f09
2649+
// 00071fffa021cec66f6290d706d69c91df87bade

include/linux/atomic/atomic-instrumented.h

Lines changed: 67 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -592,6 +592,28 @@ atomic_add_negative(int i, atomic_t *v)
592592
return arch_atomic_add_negative(i, v);
593593
}
594594

595+
static __always_inline bool
596+
atomic_add_negative_acquire(int i, atomic_t *v)
597+
{
598+
instrument_atomic_read_write(v, sizeof(*v));
599+
return arch_atomic_add_negative_acquire(i, v);
600+
}
601+
602+
static __always_inline bool
603+
atomic_add_negative_release(int i, atomic_t *v)
604+
{
605+
kcsan_release();
606+
instrument_atomic_read_write(v, sizeof(*v));
607+
return arch_atomic_add_negative_release(i, v);
608+
}
609+
610+
static __always_inline bool
611+
atomic_add_negative_relaxed(int i, atomic_t *v)
612+
{
613+
instrument_atomic_read_write(v, sizeof(*v));
614+
return arch_atomic_add_negative_relaxed(i, v);
615+
}
616+
595617
static __always_inline int
596618
atomic_fetch_add_unless(atomic_t *v, int a, int u)
597619
{
@@ -1211,6 +1233,28 @@ atomic64_add_negative(s64 i, atomic64_t *v)
12111233
return arch_atomic64_add_negative(i, v);
12121234
}
12131235

1236+
static __always_inline bool
1237+
atomic64_add_negative_acquire(s64 i, atomic64_t *v)
1238+
{
1239+
instrument_atomic_read_write(v, sizeof(*v));
1240+
return arch_atomic64_add_negative_acquire(i, v);
1241+
}
1242+
1243+
static __always_inline bool
1244+
atomic64_add_negative_release(s64 i, atomic64_t *v)
1245+
{
1246+
kcsan_release();
1247+
instrument_atomic_read_write(v, sizeof(*v));
1248+
return arch_atomic64_add_negative_release(i, v);
1249+
}
1250+
1251+
static __always_inline bool
1252+
atomic64_add_negative_relaxed(s64 i, atomic64_t *v)
1253+
{
1254+
instrument_atomic_read_write(v, sizeof(*v));
1255+
return arch_atomic64_add_negative_relaxed(i, v);
1256+
}
1257+
12141258
static __always_inline s64
12151259
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
12161260
{
@@ -1830,6 +1874,28 @@ atomic_long_add_negative(long i, atomic_long_t *v)
18301874
return arch_atomic_long_add_negative(i, v);
18311875
}
18321876

1877+
static __always_inline bool
1878+
atomic_long_add_negative_acquire(long i, atomic_long_t *v)
1879+
{
1880+
instrument_atomic_read_write(v, sizeof(*v));
1881+
return arch_atomic_long_add_negative_acquire(i, v);
1882+
}
1883+
1884+
static __always_inline bool
1885+
atomic_long_add_negative_release(long i, atomic_long_t *v)
1886+
{
1887+
kcsan_release();
1888+
instrument_atomic_read_write(v, sizeof(*v));
1889+
return arch_atomic_long_add_negative_release(i, v);
1890+
}
1891+
1892+
static __always_inline bool
1893+
atomic_long_add_negative_relaxed(long i, atomic_long_t *v)
1894+
{
1895+
instrument_atomic_read_write(v, sizeof(*v));
1896+
return arch_atomic_long_add_negative_relaxed(i, v);
1897+
}
1898+
18331899
static __always_inline long
18341900
atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
18351901
{
@@ -2083,4 +2149,4 @@ atomic_long_dec_if_positive(atomic_long_t *v)
20832149
})
20842150

20852151
#endif /* _LINUX_ATOMIC_INSTRUMENTED_H */
2086-
// 764f741eb77a7ad565dc8d99ce2837d5542e8aee
2152+
// 1b485de9cbaa4900de59e14ee2084357eaeb1c3a

include/linux/atomic/atomic-long.h

Lines changed: 37 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -479,6 +479,24 @@ arch_atomic_long_add_negative(long i, atomic_long_t *v)
479479
return arch_atomic64_add_negative(i, v);
480480
}
481481

482+
static __always_inline bool
483+
arch_atomic_long_add_negative_acquire(long i, atomic_long_t *v)
484+
{
485+
return arch_atomic64_add_negative_acquire(i, v);
486+
}
487+
488+
static __always_inline bool
489+
arch_atomic_long_add_negative_release(long i, atomic_long_t *v)
490+
{
491+
return arch_atomic64_add_negative_release(i, v);
492+
}
493+
494+
static __always_inline bool
495+
arch_atomic_long_add_negative_relaxed(long i, atomic_long_t *v)
496+
{
497+
return arch_atomic64_add_negative_relaxed(i, v);
498+
}
499+
482500
static __always_inline long
483501
arch_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
484502
{
@@ -973,6 +991,24 @@ arch_atomic_long_add_negative(long i, atomic_long_t *v)
973991
return arch_atomic_add_negative(i, v);
974992
}
975993

994+
static __always_inline bool
995+
arch_atomic_long_add_negative_acquire(long i, atomic_long_t *v)
996+
{
997+
return arch_atomic_add_negative_acquire(i, v);
998+
}
999+
1000+
static __always_inline bool
1001+
arch_atomic_long_add_negative_release(long i, atomic_long_t *v)
1002+
{
1003+
return arch_atomic_add_negative_release(i, v);
1004+
}
1005+
1006+
static __always_inline bool
1007+
arch_atomic_long_add_negative_relaxed(long i, atomic_long_t *v)
1008+
{
1009+
return arch_atomic_add_negative_relaxed(i, v);
1010+
}
1011+
9761012
static __always_inline long
9771013
arch_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
9781014
{
@@ -1011,4 +1047,4 @@ arch_atomic_long_dec_if_positive(atomic_long_t *v)
10111047

10121048
#endif /* CONFIG_64BIT */
10131049
#endif /* _LINUX_ATOMIC_LONG_H */
1014-
// e8f0e08ff072b74d180eabe2ad001282b38c2c88
1050+
// a194c07d7d2f4b0e178d3c118c919775d5d65f50

0 commit comments

Comments (0)