Skip to content

Commit 6000b5f

Browse files
committed
s390/bitops: make bitops only work on longs
The bitops code was optimized to generate test-under-mask instructions with the __bitops_byte() helper. However, that was many years ago, and in the meantime a lot of new instructions have been introduced. Changing the code so that it always operates on longs nowadays even generates shorter code (~ -20kb, defconfig, gcc 10, march=zEC12). Signed-off-by: Heiko Carstens <hca@linux.ibm.com>
1 parent 17a363d commit 6000b5f

1 file changed

Lines changed: 47 additions & 46 deletions

File tree

arch/s390/include/asm/bitops.h

Lines changed: 47 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -42,145 +42,146 @@
4242
#define __BITOPS_WORDS(bits) (((bits) + BITS_PER_LONG - 1) / BITS_PER_LONG)
4343

4444
static inline unsigned long *
45-
__bitops_word(unsigned long nr, volatile unsigned long *ptr)
45+
__bitops_word(unsigned long nr, const volatile unsigned long *ptr)
4646
{
4747
unsigned long addr;
4848

4949
addr = (unsigned long)ptr + ((nr ^ (nr & (BITS_PER_LONG - 1))) >> 3);
5050
return (unsigned long *)addr;
5151
}
5252

53-
static inline unsigned char *
54-
__bitops_byte(unsigned long nr, volatile unsigned long *ptr)
53+
static inline unsigned long __bitops_mask(unsigned long nr)
5554
{
56-
return ((unsigned char *)ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
55+
return 1UL << (nr & (BITS_PER_LONG - 1));
5756
}
5857

5958
/*
 * Atomically set bit @nr in the bitmap at @ptr by OR-ing the single-bit
 * mask into the containing word (non-_barrier variant of the atomic op).
 */
static __always_inline void arch_set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);

	__atomic64_or(mask, (long *)addr);
}
6765

6866
/*
 * Atomically clear bit @nr in the bitmap at @ptr by AND-ing the
 * complement of the single-bit mask into the containing word.
 */
static __always_inline void arch_clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);

	__atomic64_and(~mask, (long *)addr);
}
7673

7774
/*
 * Atomically toggle bit @nr in the bitmap at @ptr by XOR-ing the
 * single-bit mask into the containing word.
 */
static __always_inline void arch_change_bit(unsigned long nr,
					    volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);

	__atomic64_xor(mask, (long *)addr);
}
8682

8783
/*
 * Atomically set bit @nr in the bitmap at @ptr and return its previous
 * value.  Uses the _barrier variant of the atomic OR, which returns the
 * old word contents; the bool conversion of (old & mask) yields the
 * prior state of the bit.
 */
static inline bool arch_test_and_set_bit(unsigned long nr,
					 volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = __atomic64_or_barrier(mask, (long *)addr);
	return old & mask;
}
9793

9894
/*
 * Atomically clear bit @nr in the bitmap at @ptr and return its
 * previous value.  The _barrier variant of the atomic AND returns the
 * old word contents, which are tested against the (uncomplemented) mask.
 */
static inline bool arch_test_and_clear_bit(unsigned long nr,
					   volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = __atomic64_and_barrier(~mask, (long *)addr);
	return old & mask;
}
108104

109105
/*
 * Atomically toggle bit @nr in the bitmap at @ptr and return its
 * previous value, using the _barrier variant of the atomic XOR.
 */
static inline bool arch_test_and_change_bit(unsigned long nr,
					    volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = __atomic64_xor_barrier(mask, (long *)addr);
	return old & mask;
}
119115

120116
/* Non-atomic variant of arch_set_bit(): plain read-modify-write OR. */
static inline void arch___set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long mask = __bitops_mask(nr);
	unsigned long *addr = __bitops_word(nr, ptr);

	*addr = *addr | mask;
}
126123

127124
/* Non-atomic variant of arch_clear_bit(): plain read-modify-write AND. */
static inline void arch___clear_bit(unsigned long nr,
				    volatile unsigned long *ptr)
{
	unsigned long mask = __bitops_mask(nr);
	unsigned long *addr = __bitops_word(nr, ptr);

	*addr = *addr & ~mask;
}
134132

135133
/* Non-atomic variant of arch_change_bit(): plain read-modify-write XOR. */
static inline void arch___change_bit(unsigned long nr,
				     volatile unsigned long *ptr)
{
	unsigned long mask = __bitops_mask(nr);
	unsigned long *addr = __bitops_word(nr, ptr);

	*addr = *addr ^ mask;
}
142141

143142
/*
 * Non-atomic test-and-set: set bit @nr in the bitmap at @ptr and
 * return its previous value.
 */
static inline bool arch___test_and_set_bit(unsigned long nr,
					   volatile unsigned long *ptr)
{
	unsigned long mask = __bitops_mask(nr);
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long old = *addr;

	*addr = old | mask;
	return old & mask;
}
153153

154154
/*
 * Non-atomic test-and-clear: clear bit @nr in the bitmap at @ptr and
 * return its previous value.
 */
static inline bool arch___test_and_clear_bit(unsigned long nr,
					     volatile unsigned long *ptr)
{
	unsigned long mask = __bitops_mask(nr);
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long old = *addr;

	*addr = old & ~mask;
	return old & mask;
}
164165

165166
/*
 * Non-atomic test-and-change: toggle bit @nr in the bitmap at @ptr and
 * return its previous value.
 */
static inline bool arch___test_and_change_bit(unsigned long nr,
					      volatile unsigned long *ptr)
{
	unsigned long mask = __bitops_mask(nr);
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long old = *addr;

	*addr = old ^ mask;
	return old & mask;
}
175177

176178
/* Non-atomically test bit @nr in the bitmap at @ptr. */
static inline bool arch_test_bit(unsigned long nr,
				 const volatile unsigned long *ptr)
{
	const volatile unsigned long *addr = __bitops_word(nr, ptr);

	return *addr & __bitops_mask(nr);
}
185186

186187
static inline bool arch_test_and_set_bit_lock(unsigned long nr,

0 commit comments

Comments
 (0)