Skip to content

Commit 4335edb

Browse files
hca (Heiko Carstens) authored and Alexander Gordeev committed
s390: Remove superfluous newlines from inline assemblies
Remove superfluous newlines from inline assemblies.

Compilers use the number of lines of inline assemblies as a heuristic for
complexity and inlining decisions. Therefore inline assemblies should only
contain as many lines as required. A lot of inline assemblies contain a
superfluous newline on the last line. Remove such newlines to improve
compiler inlining decisions.

Suggested-by: Juergen Christ <jchrist@linux.ibm.com>
Signed-off-by: Heiko Carstens <hca@linux.ibm.com>
Reviewed-by: Alexander Gordeev <agordeev@linux.ibm.com>
Reviewed-by: Juergen Christ <jchrist@linux.ibm.com>
Signed-off-by: Alexander Gordeev <agordeev@linux.ibm.com>
1 parent f0edc8f commit 4335edb

34 files changed

Lines changed: 110 additions & 110 deletions

arch/s390/hypfs/hypfs_sprp.c

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ static inline unsigned long __hypfs_sprp_diag304(void *data, unsigned long cmd)
2727
{
2828
union register_pair r1 = { .even = virt_to_phys(data), };
2929

30-
asm volatile("diag %[r1],%[r3],0x304\n"
30+
asm volatile("diag %[r1],%[r3],0x304"
3131
: [r1] "+&d" (r1.pair)
3232
: [r3] "d" (cmd)
3333
: "memory");

arch/s390/include/asm/ap.h

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -143,7 +143,7 @@ static inline struct ap_queue_status ap_tapq(ap_qid_t qid,
143143
" lghi 2,0\n" /* 0 into gr2 */
144144
" .insn rre,0xb2af0000,0,0\n" /* PQAP(TAPQ) */
145145
" lgr %[reg1],1\n" /* gr1 (status) into reg1 */
146-
" lgr %[reg2],2\n" /* gr2 into reg2 */
146+
" lgr %[reg2],2" /* gr2 into reg2 */
147147
: [reg1] "=&d" (reg1.value), [reg2] "=&d" (reg2)
148148
: [qid] "d" (qid)
149149
: "cc", "0", "1", "2");
@@ -186,7 +186,7 @@ static inline struct ap_queue_status ap_rapq(ap_qid_t qid, int fbit)
186186
asm volatile(
187187
" lgr 0,%[reg0]\n" /* qid arg into gr0 */
188188
" .insn rre,0xb2af0000,0,0\n" /* PQAP(RAPQ) */
189-
" lgr %[reg1],1\n" /* gr1 (status) into reg1 */
189+
" lgr %[reg1],1" /* gr1 (status) into reg1 */
190190
: [reg1] "=&d" (reg1.value)
191191
: [reg0] "d" (reg0)
192192
: "cc", "0", "1");
@@ -211,7 +211,7 @@ static inline struct ap_queue_status ap_zapq(ap_qid_t qid, int fbit)
211211
asm volatile(
212212
" lgr 0,%[reg0]\n" /* qid arg into gr0 */
213213
" .insn rre,0xb2af0000,0,0\n" /* PQAP(ZAPQ) */
214-
" lgr %[reg1],1\n" /* gr1 (status) into reg1 */
214+
" lgr %[reg1],1" /* gr1 (status) into reg1 */
215215
: [reg1] "=&d" (reg1.value)
216216
: [reg0] "d" (reg0)
217217
: "cc", "0", "1");
@@ -315,7 +315,7 @@ static inline struct ap_queue_status ap_aqic(ap_qid_t qid,
315315
" lgr 1,%[reg1]\n" /* irq ctrl into gr1 */
316316
" lgr 2,%[reg2]\n" /* ni addr into gr2 */
317317
" .insn rre,0xb2af0000,0,0\n" /* PQAP(AQIC) */
318-
" lgr %[reg1],1\n" /* gr1 (status) into reg1 */
318+
" lgr %[reg1],1" /* gr1 (status) into reg1 */
319319
: [reg1] "+&d" (reg1.value)
320320
: [reg0] "d" (reg0), [reg2] "d" (reg2)
321321
: "cc", "memory", "0", "1", "2");
@@ -363,7 +363,7 @@ static inline struct ap_queue_status ap_qact(ap_qid_t qid, int ifbit,
363363
" lgr 1,%[reg1]\n" /* qact in info into gr1 */
364364
" .insn rre,0xb2af0000,0,0\n" /* PQAP(QACT) */
365365
" lgr %[reg1],1\n" /* gr1 (status) into reg1 */
366-
" lgr %[reg2],2\n" /* qact out info into reg2 */
366+
" lgr %[reg2],2" /* qact out info into reg2 */
367367
: [reg1] "+&d" (reg1.value), [reg2] "=&d" (reg2)
368368
: [reg0] "d" (reg0)
369369
: "cc", "0", "1", "2");
@@ -388,7 +388,7 @@ static inline struct ap_queue_status ap_bapq(ap_qid_t qid)
388388
asm volatile(
389389
" lgr 0,%[reg0]\n" /* qid arg into gr0 */
390390
" .insn rre,0xb2af0000,0,0\n" /* PQAP(BAPQ) */
391-
" lgr %[reg1],1\n" /* gr1 (status) into reg1 */
391+
" lgr %[reg1],1" /* gr1 (status) into reg1 */
392392
: [reg1] "=&d" (reg1.value)
393393
: [reg0] "d" (reg0)
394394
: "cc", "0", "1");
@@ -416,7 +416,7 @@ static inline struct ap_queue_status ap_aapq(ap_qid_t qid, unsigned int sec_idx)
416416
" lgr 0,%[reg0]\n" /* qid arg into gr0 */
417417
" lgr 2,%[reg2]\n" /* secret index into gr2 */
418418
" .insn rre,0xb2af0000,0,0\n" /* PQAP(AAPQ) */
419-
" lgr %[reg1],1\n" /* gr1 (status) into reg1 */
419+
" lgr %[reg1],1" /* gr1 (status) into reg1 */
420420
: [reg1] "=&d" (reg1.value)
421421
: [reg0] "d" (reg0), [reg2] "d" (reg2)
422422
: "cc", "0", "1", "2");
@@ -453,7 +453,7 @@ static inline struct ap_queue_status ap_nqap(ap_qid_t qid,
453453
" lgr 0,%[reg0]\n" /* qid param in gr0 */
454454
"0: .insn rre,0xb2ad0000,%[nqap_r1],%[nqap_r2]\n"
455455
" brc 2,0b\n" /* handle partial completion */
456-
" lgr %[reg1],1\n" /* gr1 (status) into reg1 */
456+
" lgr %[reg1],1" /* gr1 (status) into reg1 */
457457
: [reg0] "+&d" (reg0), [reg1] "=&d" (reg1.value),
458458
[nqap_r2] "+&d" (nqap_r2.pair)
459459
: [nqap_r1] "d" (nqap_r1.pair)
@@ -518,7 +518,7 @@ static inline struct ap_queue_status ap_dqap(ap_qid_t qid,
518518
" brc 6,0b\n" /* handle partial complete */
519519
"2: lgr %[reg0],0\n" /* gr0 (qid + info) into reg0 */
520520
" lgr %[reg1],1\n" /* gr1 (status) into reg1 */
521-
" lgr %[reg2],2\n" /* gr2 (res length) into reg2 */
521+
" lgr %[reg2],2" /* gr2 (res length) into reg2 */
522522
: [reg0] "+&d" (reg0), [reg1] "=&d" (reg1.value),
523523
[reg2] "=&d" (reg2), [rp1] "+&d" (rp1.pair),
524524
[rp2] "+&d" (rp2.pair)

arch/s390/include/asm/atomic_ops.h

Lines changed: 14 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ static __always_inline int __atomic_read(const int *ptr)
1717
int val;
1818

1919
asm volatile(
20-
" l %[val],%[ptr]\n"
20+
" l %[val],%[ptr]"
2121
: [val] "=d" (val) : [ptr] "R" (*ptr));
2222
return val;
2323
}
@@ -26,11 +26,11 @@ static __always_inline void __atomic_set(int *ptr, int val)
2626
{
2727
if (__builtin_constant_p(val) && val >= S16_MIN && val <= S16_MAX) {
2828
asm volatile(
29-
" mvhi %[ptr],%[val]\n"
29+
" mvhi %[ptr],%[val]"
3030
: [ptr] "=Q" (*ptr) : [val] "K" (val));
3131
} else {
3232
asm volatile(
33-
" st %[val],%[ptr]\n"
33+
" st %[val],%[ptr]"
3434
: [ptr] "=R" (*ptr) : [val] "d" (val));
3535
}
3636
}
@@ -40,7 +40,7 @@ static __always_inline long __atomic64_read(const long *ptr)
4040
long val;
4141

4242
asm volatile(
43-
" lg %[val],%[ptr]\n"
43+
" lg %[val],%[ptr]"
4444
: [val] "=d" (val) : [ptr] "RT" (*ptr));
4545
return val;
4646
}
@@ -49,11 +49,11 @@ static __always_inline void __atomic64_set(long *ptr, long val)
4949
{
5050
if (__builtin_constant_p(val) && val >= S16_MIN && val <= S16_MAX) {
5151
asm volatile(
52-
" mvghi %[ptr],%[val]\n"
52+
" mvghi %[ptr],%[val]"
5353
: [ptr] "=Q" (*ptr) : [val] "K" (val));
5454
} else {
5555
asm volatile(
56-
" stg %[val],%[ptr]\n"
56+
" stg %[val],%[ptr]"
5757
: [ptr] "=RT" (*ptr) : [val] "d" (val));
5858
}
5959
}
@@ -66,7 +66,7 @@ static __always_inline op_type op_name(op_type val, op_type *ptr) \
6666
op_type old; \
6767
\
6868
asm volatile( \
69-
op_string " %[old],%[val],%[ptr]\n" \
69+
op_string " %[old],%[val],%[ptr]" \
7070
op_barrier \
7171
: [old] "=d" (old), [ptr] "+QS" (*ptr) \
7272
: [val] "d" (val) : "cc", "memory"); \
@@ -75,7 +75,7 @@ static __always_inline op_type op_name(op_type val, op_type *ptr) \
7575

7676
#define __ATOMIC_OPS(op_name, op_type, op_string) \
7777
__ATOMIC_OP(op_name, op_type, op_string, "") \
78-
__ATOMIC_OP(op_name##_barrier, op_type, op_string, "bcr 14,0\n")
78+
__ATOMIC_OP(op_name##_barrier, op_type, op_string, "\nbcr 14,0")
7979

8080
__ATOMIC_OPS(__atomic_add, int, "laa")
8181
__ATOMIC_OPS(__atomic_and, int, "lan")
@@ -94,14 +94,14 @@ __ATOMIC_OPS(__atomic64_xor, long, "laxg")
9494
static __always_inline void op_name(op_type val, op_type *ptr) \
9595
{ \
9696
asm volatile( \
97-
op_string " %[ptr],%[val]\n" \
97+
op_string " %[ptr],%[val]" \
9898
op_barrier \
9999
: [ptr] "+QS" (*ptr) : [val] "i" (val) : "cc", "memory");\
100100
}
101101

102102
#define __ATOMIC_CONST_OPS(op_name, op_type, op_string) \
103103
__ATOMIC_CONST_OP(op_name, op_type, op_string, "") \
104-
__ATOMIC_CONST_OP(op_name##_barrier, op_type, op_string, "bcr 14,0\n")
104+
__ATOMIC_CONST_OP(op_name##_barrier, op_type, op_string, "\nbcr 14,0")
105105

106106
__ATOMIC_CONST_OPS(__atomic_add_const, int, "asi")
107107
__ATOMIC_CONST_OPS(__atomic64_add_const, long, "agsi")
@@ -179,7 +179,7 @@ static __always_inline bool op_name(op_type val, op_type *ptr) \
179179
int cc; \
180180
\
181181
asm volatile( \
182-
op_string " %[tmp],%[val],%[ptr]\n" \
182+
op_string " %[tmp],%[val],%[ptr]" \
183183
op_barrier \
184184
: "=@cc" (cc), [tmp] "=d" (tmp), [ptr] "+QS" (*ptr) \
185185
: [val] "d" (val) \
@@ -189,7 +189,7 @@ static __always_inline bool op_name(op_type val, op_type *ptr) \
189189

190190
#define __ATOMIC_TEST_OPS(op_name, op_type, op_string) \
191191
__ATOMIC_TEST_OP(op_name, op_type, op_string, "") \
192-
__ATOMIC_TEST_OP(op_name##_barrier, op_type, op_string, "bcr 14,0\n")
192+
__ATOMIC_TEST_OP(op_name##_barrier, op_type, op_string, "\nbcr 14,0")
193193

194194
__ATOMIC_TEST_OPS(__atomic_add_and_test, int, "laal")
195195
__ATOMIC_TEST_OPS(__atomic64_add_and_test, long, "laalg")
@@ -203,7 +203,7 @@ static __always_inline bool op_name(op_type val, op_type *ptr) \
203203
int cc; \
204204
\
205205
asm volatile( \
206-
op_string " %[ptr],%[val]\n" \
206+
op_string " %[ptr],%[val]" \
207207
op_barrier \
208208
: "=@cc" (cc), [ptr] "+QS" (*ptr) \
209209
: [val] "i" (val) \
@@ -213,7 +213,7 @@ static __always_inline bool op_name(op_type val, op_type *ptr) \
213213

214214
#define __ATOMIC_CONST_TEST_OPS(op_name, op_type, op_string) \
215215
__ATOMIC_CONST_TEST_OP(op_name, op_type, op_string, "") \
216-
__ATOMIC_CONST_TEST_OP(op_name##_barrier, op_type, op_string, "bcr 14,0\n")
216+
__ATOMIC_CONST_TEST_OP(op_name##_barrier, op_type, op_string, "\nbcr 14,0")
217217

218218
__ATOMIC_CONST_TEST_OPS(__atomic_add_const_and_test, int, "alsi")
219219
__ATOMIC_CONST_TEST_OPS(__atomic64_add_const_and_test, long, "algsi")

arch/s390/include/asm/barrier.h

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -18,9 +18,9 @@
1818

1919
#ifdef MARCH_HAS_Z196_FEATURES
2020
/* Fast-BCR without checkpoint synchronization */
21-
#define __ASM_BCR_SERIALIZE "bcr 14,0\n"
21+
#define __ASM_BCR_SERIALIZE "bcr 14,0"
2222
#else
23-
#define __ASM_BCR_SERIALIZE "bcr 15,0\n"
23+
#define __ASM_BCR_SERIALIZE "bcr 15,0"
2424
#endif
2525

2626
static __always_inline void bcr_serialize(void)
@@ -69,12 +69,12 @@ static inline unsigned long array_index_mask_nospec(unsigned long index,
6969

7070
if (__builtin_constant_p(size) && size > 0) {
7171
asm(" clgr %2,%1\n"
72-
" slbgr %0,%0\n"
72+
" slbgr %0,%0"
7373
:"=d" (mask) : "d" (size-1), "d" (index) :"cc");
7474
return mask;
7575
}
7676
asm(" clgr %1,%2\n"
77-
" slbgr %0,%0\n"
77+
" slbgr %0,%0"
7878
:"=d" (mask) : "d" (size), "d" (index) :"cc");
7979
return ~mask;
8080
}

arch/s390/include/asm/bitops.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -62,7 +62,7 @@ static __always_inline bool arch_test_bit(unsigned long nr, const volatile unsig
6262
addr += (nr ^ (BITS_PER_LONG - BITS_PER_BYTE)) / BITS_PER_BYTE;
6363
mask = 1UL << (nr & (BITS_PER_BYTE - 1));
6464
asm volatile(
65-
" tm %[addr],%[mask]\n"
65+
" tm %[addr],%[mask]"
6666
: "=@cc" (cc)
6767
: [addr] "Q" (*addr), [mask] "I" (mask)
6868
);

arch/s390/include/asm/checksum.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ static inline __wsum cksm(const void *buff, int len, __wsum sum)
2727
kmsan_check_memory(buff, len);
2828
asm volatile(
2929
"0: cksm %[sum],%[rp]\n"
30-
" jo 0b\n"
30+
" jo 0b"
3131
: [sum] "+&d" (sum), [rp] "+&d" (rp.pair) : : "cc", "memory");
3232
return sum;
3333
}

arch/s390/include/asm/cmpxchg.h

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ void __cmpxchg_called_with_bad_pointer(void);
1818
static __always_inline u32 __cs_asm(u64 ptr, u32 old, u32 new)
1919
{
2020
asm volatile(
21-
" cs %[old],%[new],%[ptr]\n"
21+
" cs %[old],%[new],%[ptr]"
2222
: [old] "+d" (old), [ptr] "+Q" (*(u32 *)ptr)
2323
: [new] "d" (new)
2424
: "memory", "cc");
@@ -28,7 +28,7 @@ static __always_inline u32 __cs_asm(u64 ptr, u32 old, u32 new)
2828
static __always_inline u64 __csg_asm(u64 ptr, u64 old, u64 new)
2929
{
3030
asm volatile(
31-
" csg %[old],%[new],%[ptr]\n"
31+
" csg %[old],%[new],%[ptr]"
3232
: [old] "+d" (old), [ptr] "+QS" (*(u64 *)ptr)
3333
: [new] "d" (new)
3434
: "memory", "cc");
@@ -126,7 +126,7 @@ static __always_inline u64 __arch_cmpxchg(u64 ptr, u64 old, u64 new, int size)
126126
} \
127127
case 4: { \
128128
asm volatile( \
129-
" cs %[__old],%[__new],%[__ptr]\n" \
129+
" cs %[__old],%[__new],%[__ptr]" \
130130
: [__old] "+d" (*__oldp), \
131131
[__ptr] "+Q" (*(ptr)), \
132132
"=@cc" (__cc) \
@@ -136,7 +136,7 @@ static __always_inline u64 __arch_cmpxchg(u64 ptr, u64 old, u64 new, int size)
136136
} \
137137
case 8: { \
138138
asm volatile( \
139-
" csg %[__old],%[__new],%[__ptr]\n" \
139+
" csg %[__old],%[__new],%[__ptr]" \
140140
: [__old] "+d" (*__oldp), \
141141
[__ptr] "+QS" (*(ptr)), \
142142
"=@cc" (__cc) \
@@ -241,7 +241,7 @@ static __always_inline u64 __arch_xchg(u64 ptr, u64 x, int size)
241241
static __always_inline u128 arch_cmpxchg128(volatile u128 *ptr, u128 old, u128 new)
242242
{
243243
asm volatile(
244-
" cdsg %[old],%[new],%[ptr]\n"
244+
" cdsg %[old],%[new],%[ptr]"
245245
: [old] "+d" (old), [ptr] "+QS" (*ptr)
246246
: [new] "d" (new)
247247
: "memory", "cc");
@@ -258,7 +258,7 @@ static __always_inline bool arch_try_cmpxchg128(volatile u128 *ptr, u128 *oldp,
258258
int cc;
259259

260260
asm volatile(
261-
" cdsg %[old],%[new],%[ptr]\n"
261+
" cdsg %[old],%[new],%[ptr]"
262262
: [old] "+d" (*oldp), [ptr] "+QS" (*ptr), "=@cc" (cc)
263263
: [new] "d" (new)
264264
: "memory");

0 commit comments

Comments (0)