Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

s390/lib: add missing memory barriers to string inline assemblies

We have a couple of inline assemblies like memchr() and strlen() that
read from memory, but tell the compiler that they only need the
addresses of the strings they access.
This allows the compiler to omit the initialization of such strings
and therefore generate broken code. Add the missing memory barriers to
all string-related inline assemblies to fix this potential issue. It
looks like the compiler currently does not generate broken code due to
these bugs.

Signed-off-by: Heiko Carstens <heiko.carstens@de.ibm.com>
Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com>

authored by

Heiko Carstens and committed by
Martin Schwidefsky
7a71fd1c 259acc5c

+10 -10
+4 -4
arch/s390/include/asm/string.h
··· 62 62 " jl 1f\n" 63 63 " la %0,0\n" 64 64 "1:" 65 - : "+a" (ret), "+&a" (s) : "d" (r0) : "cc"); 65 + : "+a" (ret), "+&a" (s) : "d" (r0) : "cc", "memory"); 66 66 return (void *) ret; 67 67 } 68 68 ··· 74 74 asm volatile( 75 75 "0: srst %0,%1\n" 76 76 " jo 0b\n" 77 - : "+a" (ret), "+&a" (s) : "d" (r0) : "cc"); 77 + : "+a" (ret), "+&a" (s) : "d" (r0) : "cc", "memory"); 78 78 return (void *) ret; 79 79 } 80 80 ··· 115 115 asm volatile( 116 116 "0: srst %0,%1\n" 117 117 " jo 0b" 118 - : "+d" (r0), "+a" (tmp) : : "cc"); 118 + : "+d" (r0), "+a" (tmp) : : "cc", "memory"); 119 119 return r0 - (unsigned long) s; 120 120 } 121 121 ··· 128 128 asm volatile( 129 129 "0: srst %0,%1\n" 130 130 " jo 0b" 131 - : "+a" (end), "+a" (tmp) : "d" (r0) : "cc"); 131 + : "+a" (end), "+a" (tmp) : "d" (r0) : "cc", "memory"); 132 132 return end - s; 133 133 } 134 134 #else /* IN_ARCH_STRING_C */
+6 -6
arch/s390/lib/string.c
··· 20 20 21 21 asm volatile ("0: srst %0,%1\n" 22 22 " jo 0b" 23 - : "+d" (r0), "+a" (s) : : "cc" ); 23 + : "+d" (r0), "+a" (s) : : "cc", "memory"); 24 24 return (char *) r0; 25 25 } 26 26 ··· 31 31 32 32 asm volatile ("0: srst %0,%1\n" 33 33 " jo 0b" 34 - : "+d" (p), "+a" (s) : "d" (r0) : "cc" ); 34 + : "+d" (p), "+a" (s) : "d" (r0) : "cc", "memory"); 35 35 return (char *) p; 36 36 } 37 37 ··· 213 213 " sr %0,%1\n" 214 214 "1:" 215 215 : "+d" (ret), "+d" (r0), "+a" (cs), "+a" (ct) 216 - : : "cc" ); 216 + : : "cc", "memory"); 217 217 return ret; 218 218 } 219 219 EXPORT_SYMBOL(strcmp); ··· 250 250 " ipm %0\n" 251 251 " srl %0,28" 252 252 : "=&d" (cc), "+a" (r2), "+a" (r3), 253 - "+a" (r4), "+a" (r5) : : "cc"); 253 + "+a" (r4), "+a" (r5) : : "cc", "memory"); 254 254 return cc; 255 255 } 256 256 ··· 298 298 " jl 1f\n" 299 299 " la %0,0\n" 300 300 "1:" 301 - : "+a" (ret), "+&a" (s) : "d" (r0) : "cc" ); 301 + : "+a" (ret), "+&a" (s) : "d" (r0) : "cc", "memory"); 302 302 return (void *) ret; 303 303 } 304 304 EXPORT_SYMBOL(memchr); ··· 336 336 337 337 asm volatile ("0: srst %0,%1\n" 338 338 " jo 0b\n" 339 - : "+a" (ret), "+&a" (s) : "d" (r0) : "cc" ); 339 + : "+a" (ret), "+&a" (s) : "d" (r0) : "cc", "memory"); 340 340 return (void *) ret; 341 341 } 342 342 EXPORT_SYMBOL(memscan);