x86/asm: Add instruction suffixes to bitops

Omitting suffixes from instructions in AT&T mode is bad practice when
the assembler cannot determine the operand size from register operands,
and upstream gas is likely to start warning about it in the future
(mine already does). Add the missing suffixes here. Note that for
64-bit this means some operations change from being 32-bit to 64-bit.
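
For illustration (a standalone sketch, not taken from the patch; the
variable name "word" is made up): with a memory destination, only a
register bit index pins the operation size, while an immediate one
leaves it ambiguous.

	unsigned long word;

	void suffix_demo(void)
	{
		/* Register index: %rax is 64 bits wide, so the size is implied. */
		asm volatile("bts %1,%0" : "+m" (word) : "r" (5UL));

		/* Immediate index: nothing fixes the operand size, so the
		 * assembler must pick between btsl and btsq on its own;
		 * this is the case newer gas warns about. */
		asm volatile("bts $5,%0" : "+m" (word));

		/* Explicit suffix: unambiguous; on 64-bit, q also widens
		 * the operation from 32 to 64 bits, as noted above. */
		asm volatile("btsq $5,%0" : "+m" (word));
	}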

Signed-off-by: Jan Beulich <jbeulich@suse.com>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Link: https://lkml.kernel.org/r/5A93F98702000078001ABACC@prv-mh.provo.novell.com

Authored by Jan Beulich, committed by Thomas Gleixner (22636f8c, a368d7fd)

---
 arch/x86/include/asm/bitops.h | 29 ++++++++++++++++-------------
 arch/x86/include/asm/percpu.h |  2 +-
 2 files changed, 17 insertions(+), 14 deletions(-)

diff --git a/arch/x86/include/asm/bitops.h b/arch/x86/include/asm/bitops.h
--- a/arch/x86/include/asm/bitops.h
+++ b/arch/x86/include/asm/bitops.h
@@ -78,7 +78,7 @@
 			: "iq" ((u8)CONST_MASK(nr))
 			: "memory");
 	} else {
-		asm volatile(LOCK_PREFIX "bts %1,%0"
+		asm volatile(LOCK_PREFIX __ASM_SIZE(bts) " %1,%0"
 			: BITOP_ADDR(addr) : "Ir" (nr) : "memory");
 	}
 }
@@ -94,7 +94,7 @@
  */
 static __always_inline void __set_bit(long nr, volatile unsigned long *addr)
 {
-	asm volatile("bts %1,%0" : ADDR : "Ir" (nr) : "memory");
+	asm volatile(__ASM_SIZE(bts) " %1,%0" : ADDR : "Ir" (nr) : "memory");
 }
 
 /**
@@ -115,7 +115,7 @@
 		: CONST_MASK_ADDR(nr, addr)
 		: "iq" ((u8)~CONST_MASK(nr)));
 	} else {
-		asm volatile(LOCK_PREFIX "btr %1,%0"
+		asm volatile(LOCK_PREFIX __ASM_SIZE(btr) " %1,%0"
 			: BITOP_ADDR(addr)
 			: "Ir" (nr));
 	}
@@ -137,7 +137,7 @@
 
 static __always_inline void __clear_bit(long nr, volatile unsigned long *addr)
 {
-	asm volatile("btr %1,%0" : ADDR : "Ir" (nr));
+	asm volatile(__ASM_SIZE(btr) " %1,%0" : ADDR : "Ir" (nr));
 }
 
 static __always_inline bool clear_bit_unlock_is_negative_byte(long nr, volatile unsigned long *addr)
@@ -182,7 +182,7 @@
  */
 static __always_inline void __change_bit(long nr, volatile unsigned long *addr)
 {
-	asm volatile("btc %1,%0" : ADDR : "Ir" (nr));
+	asm volatile(__ASM_SIZE(btc) " %1,%0" : ADDR : "Ir" (nr));
 }
 
 /**
@@ -201,7 +201,7 @@
 		: CONST_MASK_ADDR(nr, addr)
 		: "iq" ((u8)CONST_MASK(nr)));
 	} else {
-		asm volatile(LOCK_PREFIX "btc %1,%0"
+		asm volatile(LOCK_PREFIX __ASM_SIZE(btc) " %1,%0"
 			: BITOP_ADDR(addr)
 			: "Ir" (nr));
 	}
@@ -217,7 +217,8 @@
  */
 static __always_inline bool test_and_set_bit(long nr, volatile unsigned long *addr)
 {
-	GEN_BINARY_RMWcc(LOCK_PREFIX "bts", *addr, "Ir", nr, "%0", c);
+	GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(bts),
+			 *addr, "Ir", nr, "%0", c);
 }
 
 /**
@@ -246,7 +247,7 @@
 {
 	bool oldbit;
 
-	asm("bts %2,%1"
+	asm(__ASM_SIZE(bts) " %2,%1"
 	    CC_SET(c)
 	    : CC_OUT(c) (oldbit), ADDR
 	    : "Ir" (nr));
@@ -263,6 +264,7 @@
  */
 static __always_inline bool test_and_clear_bit(long nr, volatile unsigned long *addr)
 {
-	GEN_BINARY_RMWcc(LOCK_PREFIX "btr", *addr, "Ir", nr, "%0", c);
+	GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btr),
+			 *addr, "Ir", nr, "%0", c);
 }
 
@@ -286,7 +288,7 @@
 {
 	bool oldbit;
 
-	asm volatile("btr %2,%1"
+	asm volatile(__ASM_SIZE(btr) " %2,%1"
		     CC_SET(c)
		     : CC_OUT(c) (oldbit), ADDR
		     : "Ir" (nr));
@@ -298,7 +300,7 @@
 {
 	bool oldbit;
 
-	asm volatile("btc %2,%1"
+	asm volatile(__ASM_SIZE(btc) " %2,%1"
		     CC_SET(c)
		     : CC_OUT(c) (oldbit), ADDR
		     : "Ir" (nr) : "memory");
@@ -316,7 +318,8 @@
  */
 static __always_inline bool test_and_change_bit(long nr, volatile unsigned long *addr)
 {
-	GEN_BINARY_RMWcc(LOCK_PREFIX "btc", *addr, "Ir", nr, "%0", c);
+	GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btc),
+			 *addr, "Ir", nr, "%0", c);
 }
 
 static __always_inline bool constant_test_bit(long nr, const volatile unsigned long *addr)
@@ -329,7 +332,7 @@
 {
 	bool oldbit;
 
-	asm volatile("bt %2,%1"
+	asm volatile(__ASM_SIZE(bt) " %2,%1"
		     CC_SET(c)
		     : CC_OUT(c) (oldbit)
		     : "m" (*(unsigned long *)addr), "Ir" (nr));
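
For reference, __ASM_SIZE comes from arch/x86/include/asm/asm.h and
selects the l or q suffix by target bitness. A simplified sketch of the
C-side definitions (the real header also covers assembly files; check
the tree for the authoritative version):

	#define __ASM_FORM(x)	" " #x " "	/* stringified, space-padded */

	#ifdef __x86_64__
	# define __ASM_SEL(a, b)	__ASM_FORM(b)	/* 64-bit: q form */
	#else
	# define __ASM_SEL(a, b)	__ASM_FORM(a)	/* 32-bit: l form */
	#endif

	#define __ASM_SIZE(inst, ...) \
		__ASM_SEL(inst##l##__VA_ARGS__, inst##q##__VA_ARGS__)

So on a 64-bit build, __ASM_SIZE(bts) " %1,%0" expands to the strings
" btsq " " %1,%0", which is how the patched bitops become explicitly
64-bit there while staying btsl on 32-bit.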
diff --git a/arch/x86/include/asm/percpu.h b/arch/x86/include/asm/percpu.h
--- a/arch/x86/include/asm/percpu.h
+++ b/arch/x86/include/asm/percpu.h
@@ -526,7 +526,7 @@
 {
 	bool oldbit;
 
-	asm volatile("bt "__percpu_arg(2)",%1"
+	asm volatile("btl "__percpu_arg(2)",%1"
			CC_SET(c)
			: CC_OUT(c) (oldbit)
			: "m" (*(unsigned long __percpu *)addr), "Ir" (nr));
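
The percpu helper, by contrast, gets a hard-coded l suffix rather than
__ASM_SIZE, presumably because x86_this_cpu_variable_test_bit() takes
nr as an int: the bit-index register is then 32 bits wide on both
32-bit and 64-bit builds, and btl matches it in either case.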