|
42 | 42 | #define __BITOPS_WORDS(bits) (((bits) + BITS_PER_LONG - 1) / BITS_PER_LONG) |
43 | 43 |
|
44 | 44 | static inline unsigned long * |
45 | | -__bitops_word(unsigned long nr, volatile unsigned long *ptr) |
| 45 | +__bitops_word(unsigned long nr, const volatile unsigned long *ptr) |
46 | 46 | { |
47 | 47 | unsigned long addr; |
48 | 48 |
|
49 | 49 | addr = (unsigned long)ptr + ((nr ^ (nr & (BITS_PER_LONG - 1))) >> 3); |
50 | 50 | return (unsigned long *)addr; |
51 | 51 | } |
52 | 52 |
|
53 | | -static inline unsigned char * |
54 | | -__bitops_byte(unsigned long nr, volatile unsigned long *ptr) |
| 53 | +static inline unsigned long __bitops_mask(unsigned long nr) |
55 | 54 | { |
56 | | - return ((unsigned char *)ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3); |
| 55 | + return 1UL << (nr & (BITS_PER_LONG - 1)); |
57 | 56 | } |
58 | 57 |
|
59 | 58 | static __always_inline void arch_set_bit(unsigned long nr, volatile unsigned long *ptr) |
60 | 59 | { |
61 | 60 | unsigned long *addr = __bitops_word(nr, ptr); |
62 | | - unsigned long mask; |
| 61 | + unsigned long mask = __bitops_mask(nr); |
63 | 62 |
|
64 | | - mask = 1UL << (nr & (BITS_PER_LONG - 1)); |
65 | 63 | __atomic64_or(mask, (long *)addr); |
66 | 64 | } |
67 | 65 |
|
68 | 66 | static __always_inline void arch_clear_bit(unsigned long nr, volatile unsigned long *ptr) |
69 | 67 | { |
70 | 68 | unsigned long *addr = __bitops_word(nr, ptr); |
71 | | - unsigned long mask; |
| 69 | + unsigned long mask = __bitops_mask(nr); |
72 | 70 |
|
73 | | - mask = ~(1UL << (nr & (BITS_PER_LONG - 1))); |
74 | | - __atomic64_and(mask, (long *)addr); |
| 71 | + __atomic64_and(~mask, (long *)addr); |
75 | 72 | } |
76 | 73 |
|
77 | 74 | static __always_inline void arch_change_bit(unsigned long nr, |
78 | 75 | volatile unsigned long *ptr) |
79 | 76 | { |
80 | 77 | unsigned long *addr = __bitops_word(nr, ptr); |
81 | | - unsigned long mask; |
| 78 | + unsigned long mask = __bitops_mask(nr); |
82 | 79 |
|
83 | | - mask = 1UL << (nr & (BITS_PER_LONG - 1)); |
84 | 80 | __atomic64_xor(mask, (long *)addr); |
85 | 81 | } |
/*
 * Atomically set bit @nr in the bitmap at @ptr and return its previous
 * value.  Uses the barrier variant of the atomic OR primitive.
 */
static inline bool arch_test_and_set_bit(unsigned long nr,
					 volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = __atomic64_or_barrier(mask, (long *)addr);
	return old & mask;
}
/*
 * Atomically clear bit @nr in the bitmap at @ptr and return its previous
 * value.  Uses the barrier variant of the atomic AND primitive.
 */
static inline bool arch_test_and_clear_bit(unsigned long nr,
					   volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = __atomic64_and_barrier(~mask, (long *)addr);
	return old & mask;
}
/*
 * Atomically toggle bit @nr in the bitmap at @ptr and return its previous
 * value.  Uses the barrier variant of the atomic XOR primitive.
 */
static inline bool arch_test_and_change_bit(unsigned long nr,
					    volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = __atomic64_xor_barrier(mask, (long *)addr);
	return old & mask;
}
/* Non-atomic set of bit @nr; callers must provide their own serialization. */
static inline void arch___set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);

	*addr |= mask;
}
/* Non-atomic clear of bit @nr; callers must provide their own serialization. */
static inline void arch___clear_bit(unsigned long nr,
				    volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);

	*addr &= ~mask;
}
/* Non-atomic toggle of bit @nr; callers must provide their own serialization. */
static inline void arch___change_bit(unsigned long nr,
				     volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);

	*addr ^= mask;
}
/*
 * Non-atomically set bit @nr and return its previous value.
 * Callers must provide their own serialization.
 */
static inline bool arch___test_and_set_bit(unsigned long nr,
					   volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = *addr;
	*addr |= mask;
	return old & mask;
}
/*
 * Non-atomically clear bit @nr and return its previous value.
 * Callers must provide their own serialization.
 */
static inline bool arch___test_and_clear_bit(unsigned long nr,
					     volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = *addr;
	*addr &= ~mask;
	return old & mask;
}
/*
 * Non-atomically toggle bit @nr and return its previous value.
 * Callers must provide their own serialization.
 */
static inline bool arch___test_and_change_bit(unsigned long nr,
					      volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = *addr;
	*addr ^= mask;
	return old & mask;
}
/* Return the current value of bit @nr in the bitmap at @ptr. */
static inline bool arch_test_bit(unsigned long nr,
				 const volatile unsigned long *ptr)
{
	const volatile unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);

	return *addr & mask;
}
185 | 186 |
|
186 | 187 | static inline bool arch_test_and_set_bit_lock(unsigned long nr, |
|
0 commit comments