aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorHeiko Carstens <heiko.carstens@de.ibm.com>2016-12-13 03:50:30 -0500
committerMartin Schwidefsky <schwidefsky@de.ibm.com>2016-12-14 10:33:41 -0500
commit7a71fd1c59dfd20fac4d14486d63d3d5ab70498a (patch)
tree50466ef7939ae6850cc36212ca2ab9cc3cb8964a
parent259acc5c255a4260b3db0461afd5d93fabfe8524 (diff)
s390/lib: add missing memory barriers to string inline assemblies
We have a couple of inline assemblies like memchr() and strlen() that read from memory, but only tell the compiler that they need the addresses of the strings they access. This allows the compiler to omit the initialization of such strings and therefore generate broken code. Add the missing memory barriers to all string-related inline assemblies to fix this potential issue. It looks like the compiler currently does not generate broken code due to these bugs. Signed-off-by: Heiko Carstens <heiko.carstens@de.ibm.com> Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com>
-rw-r--r--arch/s390/include/asm/string.h8
-rw-r--r--arch/s390/lib/string.c12
2 files changed, 10 insertions, 10 deletions
diff --git a/arch/s390/include/asm/string.h b/arch/s390/include/asm/string.h
index 15a3c005c274..e5f5c7074f2c 100644
--- a/arch/s390/include/asm/string.h
+++ b/arch/s390/include/asm/string.h
@@ -62,7 +62,7 @@ static inline void *memchr(const void * s, int c, size_t n)
62 " jl 1f\n" 62 " jl 1f\n"
63 " la %0,0\n" 63 " la %0,0\n"
64 "1:" 64 "1:"
65 : "+a" (ret), "+&a" (s) : "d" (r0) : "cc"); 65 : "+a" (ret), "+&a" (s) : "d" (r0) : "cc", "memory");
66 return (void *) ret; 66 return (void *) ret;
67} 67}
68 68
@@ -74,7 +74,7 @@ static inline void *memscan(void *s, int c, size_t n)
74 asm volatile( 74 asm volatile(
75 "0: srst %0,%1\n" 75 "0: srst %0,%1\n"
76 " jo 0b\n" 76 " jo 0b\n"
77 : "+a" (ret), "+&a" (s) : "d" (r0) : "cc"); 77 : "+a" (ret), "+&a" (s) : "d" (r0) : "cc", "memory");
78 return (void *) ret; 78 return (void *) ret;
79} 79}
80 80
@@ -115,7 +115,7 @@ static inline size_t strlen(const char *s)
115 asm volatile( 115 asm volatile(
116 "0: srst %0,%1\n" 116 "0: srst %0,%1\n"
117 " jo 0b" 117 " jo 0b"
118 : "+d" (r0), "+a" (tmp) : : "cc"); 118 : "+d" (r0), "+a" (tmp) : : "cc", "memory");
119 return r0 - (unsigned long) s; 119 return r0 - (unsigned long) s;
120} 120}
121 121
@@ -128,7 +128,7 @@ static inline size_t strnlen(const char * s, size_t n)
128 asm volatile( 128 asm volatile(
129 "0: srst %0,%1\n" 129 "0: srst %0,%1\n"
130 " jo 0b" 130 " jo 0b"
131 : "+a" (end), "+a" (tmp) : "d" (r0) : "cc"); 131 : "+a" (end), "+a" (tmp) : "d" (r0) : "cc", "memory");
132 return end - s; 132 return end - s;
133} 133}
134#else /* IN_ARCH_STRING_C */ 134#else /* IN_ARCH_STRING_C */
diff --git a/arch/s390/lib/string.c b/arch/s390/lib/string.c
index 48352bffbc92..f71d9f655970 100644
--- a/arch/s390/lib/string.c
+++ b/arch/s390/lib/string.c
@@ -20,7 +20,7 @@ static inline char *__strend(const char *s)
20 20
21 asm volatile ("0: srst %0,%1\n" 21 asm volatile ("0: srst %0,%1\n"
22 " jo 0b" 22 " jo 0b"
23 : "+d" (r0), "+a" (s) : : "cc" ); 23 : "+d" (r0), "+a" (s) : : "cc", "memory");
24 return (char *) r0; 24 return (char *) r0;
25} 25}
26 26
@@ -31,7 +31,7 @@ static inline char *__strnend(const char *s, size_t n)
31 31
32 asm volatile ("0: srst %0,%1\n" 32 asm volatile ("0: srst %0,%1\n"
33 " jo 0b" 33 " jo 0b"
34 : "+d" (p), "+a" (s) : "d" (r0) : "cc" ); 34 : "+d" (p), "+a" (s) : "d" (r0) : "cc", "memory");
35 return (char *) p; 35 return (char *) p;
36} 36}
37 37
@@ -213,7 +213,7 @@ int strcmp(const char *cs, const char *ct)
213 " sr %0,%1\n" 213 " sr %0,%1\n"
214 "1:" 214 "1:"
215 : "+d" (ret), "+d" (r0), "+a" (cs), "+a" (ct) 215 : "+d" (ret), "+d" (r0), "+a" (cs), "+a" (ct)
216 : : "cc" ); 216 : : "cc", "memory");
217 return ret; 217 return ret;
218} 218}
219EXPORT_SYMBOL(strcmp); 219EXPORT_SYMBOL(strcmp);
@@ -250,7 +250,7 @@ static inline int clcle(const char *s1, unsigned long l1,
250 " ipm %0\n" 250 " ipm %0\n"
251 " srl %0,28" 251 " srl %0,28"
252 : "=&d" (cc), "+a" (r2), "+a" (r3), 252 : "=&d" (cc), "+a" (r2), "+a" (r3),
253 "+a" (r4), "+a" (r5) : : "cc"); 253 "+a" (r4), "+a" (r5) : : "cc", "memory");
254 return cc; 254 return cc;
255} 255}
256 256
@@ -298,7 +298,7 @@ void *memchr(const void *s, int c, size_t n)
298 " jl 1f\n" 298 " jl 1f\n"
299 " la %0,0\n" 299 " la %0,0\n"
300 "1:" 300 "1:"
301 : "+a" (ret), "+&a" (s) : "d" (r0) : "cc" ); 301 : "+a" (ret), "+&a" (s) : "d" (r0) : "cc", "memory");
302 return (void *) ret; 302 return (void *) ret;
303} 303}
304EXPORT_SYMBOL(memchr); 304EXPORT_SYMBOL(memchr);
@@ -336,7 +336,7 @@ void *memscan(void *s, int c, size_t n)
336 336
337 asm volatile ("0: srst %0,%1\n" 337 asm volatile ("0: srst %0,%1\n"
338 " jo 0b\n" 338 " jo 0b\n"
339 : "+a" (ret), "+&a" (s) : "d" (r0) : "cc" ); 339 : "+a" (ret), "+&a" (s) : "d" (r0) : "cc", "memory");
340 return (void *) ret; 340 return (void *) ret;
341} 341}
342EXPORT_SYMBOL(memscan); 342EXPORT_SYMBOL(memscan);