1515#include <asm/barrier.h>
1616#include <asm/cmpxchg.h>
1717
18- static inline int atomic_read (const atomic_t * v )
18+ static inline int arch_atomic_read (const atomic_t * v )
1919{
2020 return __atomic_read (v );
2121}
22+ #define arch_atomic_read arch_atomic_read
2223
23- static inline void atomic_set (atomic_t * v , int i )
24+ static inline void arch_atomic_set (atomic_t * v , int i )
2425{
2526 __atomic_set (v , i );
2627}
28+ #define arch_atomic_set arch_atomic_set
2729
28- static inline int atomic_add_return (int i , atomic_t * v )
30+ static inline int arch_atomic_add_return (int i , atomic_t * v )
2931{
3032 return __atomic_add_barrier (i , & v -> counter ) + i ;
3133}
34+ #define arch_atomic_add_return arch_atomic_add_return
3235
33- static inline int atomic_fetch_add (int i , atomic_t * v )
36+ static inline int arch_atomic_fetch_add (int i , atomic_t * v )
3437{
3538 return __atomic_add_barrier (i , & v -> counter );
3639}
40+ #define arch_atomic_fetch_add arch_atomic_fetch_add
3741
38- static inline void atomic_add (int i , atomic_t * v )
42+ static inline void arch_atomic_add (int i , atomic_t * v )
3943{
4044 __atomic_add (i , & v -> counter );
4145}
46+ #define arch_atomic_add arch_atomic_add
4247
/* Subtraction is implemented as addition of the negated (int-cast) operand. */
#define arch_atomic_sub(_i, _v)		arch_atomic_add(-(int)(_i), _v)
#define arch_atomic_sub_return(_i, _v)	arch_atomic_add_return(-(int)(_i), _v)
#define arch_atomic_fetch_sub(_i, _v)	arch_atomic_fetch_add(-(int)(_i), _v)
4651
/*
 * Generate arch_atomic_<op>() (void, unordered) and
 * arch_atomic_fetch_<op>() (returns old value, fully ordered)
 * for a given bitwise operation.
 */
#define ATOMIC_OPS(op)							\
static inline void arch_atomic_##op(int i, atomic_t *v)			\
{									\
	__atomic_##op(i, &v->counter);					\
}									\
static inline int arch_atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	return __atomic_##op##_barrier(i, &v->counter);			\
}
@@ -60,53 +65,67 @@ ATOMIC_OPS(xor)
6065
6166#undef ATOMIC_OPS
6267
/* Advertise the ops generated by ATOMIC_OPS() to the generic atomic fallbacks. */
#define arch_atomic_and			arch_atomic_and
#define arch_atomic_or			arch_atomic_or
#define arch_atomic_xor			arch_atomic_xor
#define arch_atomic_fetch_and		arch_atomic_fetch_and
#define arch_atomic_fetch_or		arch_atomic_fetch_or
#define arch_atomic_fetch_xor		arch_atomic_fetch_xor

#define arch_atomic_xchg(v, new)	(arch_xchg(&((v)->counter), new))
77+ static inline int arch_atomic_cmpxchg (atomic_t * v , int old , int new )
6678{
6779 return __atomic_cmpxchg (& v -> counter , old , new );
6880}
81+ #define arch_atomic_cmpxchg arch_atomic_cmpxchg
6982
7083#define ATOMIC64_INIT (i ) { (i) }
7184
72- static inline s64 atomic64_read (const atomic64_t * v )
85+ static inline s64 arch_atomic64_read (const atomic64_t * v )
7386{
7487 return __atomic64_read (v );
7588}
89+ #define arch_atomic64_read arch_atomic64_read
7690
77- static inline void atomic64_set (atomic64_t * v , s64 i )
91+ static inline void arch_atomic64_set (atomic64_t * v , s64 i )
7892{
7993 __atomic64_set (v , i );
8094}
95+ #define arch_atomic64_set arch_atomic64_set
8196
82- static inline s64 atomic64_add_return (s64 i , atomic64_t * v )
97+ static inline s64 arch_atomic64_add_return (s64 i , atomic64_t * v )
8398{
8499 return __atomic64_add_barrier (i , (long * )& v -> counter ) + i ;
85100}
101+ #define arch_atomic64_add_return arch_atomic64_add_return
86102
87- static inline s64 atomic64_fetch_add (s64 i , atomic64_t * v )
103+ static inline s64 arch_atomic64_fetch_add (s64 i , atomic64_t * v )
88104{
89105 return __atomic64_add_barrier (i , (long * )& v -> counter );
90106}
107+ #define arch_atomic64_fetch_add arch_atomic64_fetch_add
91108
92- static inline void atomic64_add (s64 i , atomic64_t * v )
109+ static inline void arch_atomic64_add (s64 i , atomic64_t * v )
93110{
94111 __atomic64_add (i , (long * )& v -> counter );
95112}
113+ #define arch_atomic64_add arch_atomic64_add
96114
#define arch_atomic64_xchg(v, new)	(arch_xchg(&((v)->counter), new))
98116
99- static inline s64 atomic64_cmpxchg (atomic64_t * v , s64 old , s64 new )
117+ static inline s64 arch_atomic64_cmpxchg (atomic64_t * v , s64 old , s64 new )
100118{
101119 return __atomic64_cmpxchg ((long * )& v -> counter , old , new );
102120}
121+ #define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
103122
/*
 * Generate arch_atomic64_<op>() (void, unordered) and
 * arch_atomic64_fetch_<op>() (returns old value, fully ordered)
 * for a given bitwise operation.
 */
#define ATOMIC64_OPS(op)						\
static inline void arch_atomic64_##op(s64 i, atomic64_t *v)		\
{									\
	__atomic64_##op(i, (long *)&v->counter);			\
}									\
static inline long arch_atomic64_fetch_##op(s64 i, atomic64_t *v)	\
{									\
	return __atomic64_##op##_barrier(i, (long *)&v->counter);	\
}
@@ -117,8 +136,17 @@ ATOMIC64_OPS(xor)
117136
118137#undef ATOMIC64_OPS
119138
/* Advertise the ops generated by ATOMIC64_OPS() to the generic atomic fallbacks. */
#define arch_atomic64_and		arch_atomic64_and
#define arch_atomic64_or		arch_atomic64_or
#define arch_atomic64_xor		arch_atomic64_xor
#define arch_atomic64_fetch_and		arch_atomic64_fetch_and
#define arch_atomic64_fetch_or		arch_atomic64_fetch_or
#define arch_atomic64_fetch_xor		arch_atomic64_fetch_xor

/* 64-bit subtraction is addition of the negated (s64-cast) operand. */
#define arch_atomic64_sub_return(_i, _v) arch_atomic64_add_return(-(s64)(_i), _v)
#define arch_atomic64_fetch_sub(_i, _v)	arch_atomic64_fetch_add(-(s64)(_i), _v)
#define arch_atomic64_sub(_i, _v)	arch_atomic64_add(-(s64)(_i), _v)

/* This architecture provides arch_atomic_* ops; enable the generic wrappers. */
#define ARCH_ATOMIC
123151
124152#endif /* __ARCH_S390_ATOMIC__ */