Skip to content

Commit 13525f0

Browse files
committed
s390/cmpxchg: use unsigned long values instead of void pointers
gcc and clang warn about incompatible pointer types due to the recent cmpxchg changes: drivers/gpu/drm/drm_lock.c:75:10: error: passing 'typeof (lock)' (aka 'volatile unsigned int *') to parameter of type 'void *' discards qualifiers [-Werror,-Wincompatible-pointer-types-discards-qualifiers] prev = cmpxchg(lock, old, new); ^~~~~~~~~~~~~~~~~~~~~~~ include/asm-generic/atomic-instrumented.h:1685:2: note: expanded from macro 'cmpxchg' arch_cmpxchg(__ai_ptr, __VA_ARGS__); \ ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ To avoid this simply cast pointers to unsigned long and use them instead of void pointers. This allows to stay with functions, instead of using complex defines and having to deal with all their potential side effects. Reported-by: kernel test robot <lkp@intel.com> Fixes: d2b1f6d ("s390/cmpxchg: get rid of gcc atomic builtins") Link: https://lore.kernel.org/linux-s390/202104130131.sMmSqpb5-lkp@intel.com/ Signed-off-by: Heiko Carstens <hca@linux.ibm.com>
1 parent 9d42a4d commit 13525f0

1 file changed

Lines changed: 23 additions & 26 deletions

File tree

arch/s390/include/asm/cmpxchg.h

Lines changed: 23 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -14,39 +14,37 @@
1414

1515
void __xchg_called_with_bad_pointer(void);
1616

17-
static inline unsigned long __xchg(unsigned long x, void *ptr, int size)
17+
static inline unsigned long __xchg(unsigned long x, unsigned long address, int size)
1818
{
19-
unsigned long addr, old;
19+
unsigned long old;
2020
int shift;
2121

2222
switch (size) {
2323
case 1:
24-
addr = (unsigned long) ptr;
25-
shift = (3 ^ (addr & 3)) << 3;
26-
addr ^= addr & 3;
24+
shift = (3 ^ (address & 3)) << 3;
25+
address ^= address & 3;
2726
asm volatile(
2827
" l %0,%1\n"
2928
"0: lr 0,%0\n"
3029
" nr 0,%3\n"
3130
" or 0,%2\n"
3231
" cs %0,0,%1\n"
3332
" jl 0b\n"
34-
: "=&d" (old), "+Q" (*(int *) addr)
33+
: "=&d" (old), "+Q" (*(int *) address)
3534
: "d" ((x & 0xff) << shift), "d" (~(0xff << shift))
3635
: "memory", "cc", "0");
3736
return old >> shift;
3837
case 2:
39-
addr = (unsigned long) ptr;
40-
shift = (2 ^ (addr & 2)) << 3;
41-
addr ^= addr & 2;
38+
shift = (2 ^ (address & 2)) << 3;
39+
address ^= address & 2;
4240
asm volatile(
4341
" l %0,%1\n"
4442
"0: lr 0,%0\n"
4543
" nr 0,%3\n"
4644
" or 0,%2\n"
4745
" cs %0,0,%1\n"
4846
" jl 0b\n"
49-
: "=&d" (old), "+Q" (*(int *) addr)
47+
: "=&d" (old), "+Q" (*(int *) address)
5048
: "d" ((x & 0xffff) << shift), "d" (~(0xffff << shift))
5149
: "memory", "cc", "0");
5250
return old >> shift;
@@ -55,7 +53,7 @@ static inline unsigned long __xchg(unsigned long x, void *ptr, int size)
5553
" l %0,%1\n"
5654
"0: cs %0,%2,%1\n"
5755
" jl 0b\n"
58-
: "=&d" (old), "+Q" (*(int *) ptr)
56+
: "=&d" (old), "+Q" (*(int *) address)
5957
: "d" (x)
6058
: "memory", "cc");
6159
return old;
@@ -64,7 +62,7 @@ static inline unsigned long __xchg(unsigned long x, void *ptr, int size)
6462
" lg %0,%1\n"
6563
"0: csg %0,%2,%1\n"
6664
" jl 0b\n"
67-
: "=&d" (old), "+S" (*(long *) ptr)
65+
: "=&d" (old), "+S" (*(long *) address)
6866
: "d" (x)
6967
: "memory", "cc");
7068
return old;
@@ -78,23 +76,23 @@ static inline unsigned long __xchg(unsigned long x, void *ptr, int size)
7876
__typeof__(*(ptr)) __ret; \
7977
\
8078
__ret = (__typeof__(*(ptr))) \
81-
__xchg((unsigned long)(x), (void *)(ptr), sizeof(*(ptr))); \
79+
__xchg((unsigned long)(x), (unsigned long)(ptr), \
80+
sizeof(*(ptr))); \
8281
__ret; \
8382
})
8483

8584
void __cmpxchg_called_with_bad_pointer(void);
8685

87-
static inline unsigned long __cmpxchg(void *ptr, unsigned long old,
86+
static inline unsigned long __cmpxchg(unsigned long address, unsigned long old,
8887
unsigned long new, int size)
8988
{
90-
unsigned long addr, prev, tmp;
89+
unsigned long prev, tmp;
9190
int shift;
9291

9392
switch (size) {
9493
case 1:
95-
addr = (unsigned long) ptr;
96-
shift = (3 ^ (addr & 3)) << 3;
97-
addr ^= addr & 3;
94+
shift = (3 ^ (address & 3)) << 3;
95+
address ^= address & 3;
9896
asm volatile(
9997
" l %0,%2\n"
10098
"0: nr %0,%5\n"
@@ -107,16 +105,15 @@ static inline unsigned long __cmpxchg(void *ptr, unsigned long old,
107105
" nr %1,%5\n"
108106
" jnz 0b\n"
109107
"1:"
110-
: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) addr)
108+
: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) address)
111109
: "d" ((old & 0xff) << shift),
112110
"d" ((new & 0xff) << shift),
113111
"d" (~(0xff << shift))
114112
: "memory", "cc");
115113
return prev >> shift;
116114
case 2:
117-
addr = (unsigned long) ptr;
118-
shift = (2 ^ (addr & 2)) << 3;
119-
addr ^= addr & 2;
115+
shift = (2 ^ (address & 2)) << 3;
116+
address ^= address & 2;
120117
asm volatile(
121118
" l %0,%2\n"
122119
"0: nr %0,%5\n"
@@ -129,7 +126,7 @@ static inline unsigned long __cmpxchg(void *ptr, unsigned long old,
129126
" nr %1,%5\n"
130127
" jnz 0b\n"
131128
"1:"
132-
: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) addr)
129+
: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) address)
133130
: "d" ((old & 0xffff) << shift),
134131
"d" ((new & 0xffff) << shift),
135132
"d" (~(0xffff << shift))
@@ -138,14 +135,14 @@ static inline unsigned long __cmpxchg(void *ptr, unsigned long old,
138135
case 4:
139136
asm volatile(
140137
" cs %0,%3,%1\n"
141-
: "=&d" (prev), "+Q" (*(int *) ptr)
138+
: "=&d" (prev), "+Q" (*(int *) address)
142139
: "0" (old), "d" (new)
143140
: "memory", "cc");
144141
return prev;
145142
case 8:
146143
asm volatile(
147144
" csg %0,%3,%1\n"
148-
: "=&d" (prev), "+S" (*(long *) ptr)
145+
: "=&d" (prev), "+S" (*(long *) address)
149146
: "0" (old), "d" (new)
150147
: "memory", "cc");
151148
return prev;
@@ -159,7 +156,7 @@ static inline unsigned long __cmpxchg(void *ptr, unsigned long old,
159156
__typeof__(*(ptr)) __ret; \
160157
\
161158
__ret = (__typeof__(*(ptr))) \
162-
__cmpxchg((ptr), (unsigned long)(o), \
159+
__cmpxchg((unsigned long)(ptr), (unsigned long)(o), \
163160
(unsigned long)(n), sizeof(*(ptr))); \
164161
__ret; \
165162
})

0 commit comments

Comments (0)