author	Linus Torvalds <torvalds@linux-foundation.org>	2019-03-17 12:21:48 -0400
committer	Linus Torvalds <torvalds@linux-foundation.org>	2019-03-17 12:21:48 -0400
commit	80b98e92ebcb4433b86fd32b5d82ec6b0d75cf59 (patch)
tree	1820dfcb4673328a782f84813f5f4a8debaea7bd
parent	69ebf9a16a74cf05359b851a5dc614a63f075d1a (diff)
parent	2e905c7abdcd5ff09b9df33d25eb7148c85bed2a (diff)
Merge branch 'x86-asm-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
Pull x86 asm updates from Thomas Gleixner:
 "Two cleanup patches removing dead conditionals and unused code"

* 'x86-asm-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
  x86/asm: Remove unused __constant_c_x_memset() macro and inlines
  x86/asm: Remove dead __GNUC__ conditionals
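For context on what a "dead __GNUC__ conditional" is: the kernel build has required GCC 4.6 or newer since 2018, so version guards testing for GCC older than 4.x can only ever compile one branch. A minimal sketch, not from the patch, assuming a GCC-compatible host compiler:

/* demo.c — a sketch, not kernel code. With any supported compiler the
 * #if side of a pre-4.1 guard is unreachable, so only the #else branch
 * ever gets compiled; the guard is dead weight. */
#include <stdio.h>

#if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 1)
#define WHICH "pre-4.1 branch (never reachable in a kernel build)"
#else
#define WHICH "modern branch (the only one ever compiled)"
#endif

int main(void)
{
	printf("GCC %d.%d took the %s\n", __GNUC__, __GNUC_MINOR__, WHICH);
	return 0;
}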
-rw-r--r--	arch/x86/include/asm/bitops.h	6
-rw-r--r--	arch/x86/include/asm/string_32.h	104
-rw-r--r--	arch/x86/include/asm/string_64.h	15
3 files changed, 0 insertions, 125 deletions
diff --git a/arch/x86/include/asm/bitops.h b/arch/x86/include/asm/bitops.h
index ad7b210aa3f6..d153d570bb04 100644
--- a/arch/x86/include/asm/bitops.h
+++ b/arch/x86/include/asm/bitops.h
@@ -36,13 +36,7 @@
  * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
  */
 
-#if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 1)
-/* Technically wrong, but this avoids compilation errors on some gcc
-   versions. */
-#define BITOP_ADDR(x) "=m" (*(volatile long *) (x))
-#else
 #define BITOP_ADDR(x) "+m" (*(volatile long *) (x))
-#endif
 
 #define ADDR	BITOP_ADDR(addr)
 
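The deleted branch used the write-only "=m" constraint, which the removed comment itself called "technically wrong": bts reads the addressed word before writing it back, so the operand must be read-write ("+m"). A user-space sketch of the surviving form, x86 only and not kernel code; set_bit_demo is a made-up name:

/* demo.c — illustrates why BITOP_ADDR needs "+m": declaring the operand
 * write-only would let the compiler treat the previous value as dead. */
#include <stdio.h>

static inline void set_bit_demo(long nr, volatile unsigned long *addr)
{
	asm volatile("bts %1,%0"
		     : "+m" (*addr)	/* read-write memory operand */
		     : "r" (nr));
}

int main(void)
{
	unsigned long word = 0x1;
	set_bit_demo(3, &word);
	printf("word = %#lx\n", word);	/* 0x9: bit 3 set, bit 0 preserved */
	return 0;
}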
diff --git a/arch/x86/include/asm/string_32.h b/arch/x86/include/asm/string_32.h
index 55d392c6bd29..f74362b05619 100644
--- a/arch/x86/include/asm/string_32.h
+++ b/arch/x86/include/asm/string_32.h
@@ -179,14 +179,7 @@ static inline void *__memcpy3d(void *to, const void *from, size_t len)
  * No 3D Now!
  */
 
-#if (__GNUC__ >= 4)
 #define memcpy(t, f, n) __builtin_memcpy(t, f, n)
-#else
-#define memcpy(t, f, n) \
-	(__builtin_constant_p((n)) \
-	 ? __constant_memcpy((t), (f), (n)) \
-	 : __memcpy((t), (f), (n)))
-#endif
 
 #endif
 #endif /* !CONFIG_FORTIFY_SOURCE */
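Every supported GCC honors __builtin_memcpy, and for small constant sizes the compiler expands it inline rather than emitting a call, which is the optimization the deleted __constant_memcpy dispatch reimplemented by hand. A sketch, not from the patch:

/* demo.c — constant-size __builtin_memcpy is expanded inline at -O2
 * (a few mov instructions, no library call). */
#include <stdio.h>

int main(void)
{
	unsigned char src[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };
	unsigned char dst[8] = { 0 };

	__builtin_memcpy(dst, src, sizeof(dst));	/* constant size */
	printf("%u ... %u\n", dst[0], dst[7]);		/* prints 1 ... 8 */
	return 0;
}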
@@ -216,29 +209,6 @@ static inline void *__memset_generic(void *s, char c, size_t count)
 /* we might want to write optimized versions of these later */
 #define __constant_count_memset(s, c, count) __memset_generic((s), (c), (count))
 
-/*
- * memset(x, 0, y) is a reasonably common thing to do, so we want to fill
- * things 32 bits at a time even when we don't know the size of the
- * area at compile-time..
- */
-static __always_inline
-void *__constant_c_memset(void *s, unsigned long c, size_t count)
-{
-	int d0, d1;
-	asm volatile("rep ; stosl\n\t"
-		     "testb $2,%b3\n\t"
-		     "je 1f\n\t"
-		     "stosw\n"
-		     "1:\ttestb $1,%b3\n\t"
-		     "je 2f\n\t"
-		     "stosb\n"
-		     "2:"
-		     : "=&c" (d0), "=&D" (d1)
-		     : "a" (c), "q" (count), "0" (count/4), "1" ((long)s)
-		     : "memory");
-	return s;
-}
-
 /* Added by Gertjan van Wingerde to make minix and sysv module work */
 #define __HAVE_ARCH_STRNLEN
 extern size_t strnlen(const char *s, size_t count);
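The function removed above filled count/4 32-bit words with "rep ; stosl", then used bits 1 and 0 of count to store a trailing 16-bit word and a final byte. The same logic in portable C, as a rough sketch; constant_c_memset is a stand-in name, not kernel code:

/* demo.c — assumes c already has the fill byte replicated into all
 * four byte lanes, exactly as the deleted asm did. */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

static void *constant_c_memset(void *s, uint32_t c, size_t count)
{
	uint8_t *p = s;
	size_t i;

	for (i = 0; i < count / 4; i++) {	/* rep ; stosl */
		memcpy(p, &c, 4);		/* unaligned-safe word store */
		p += 4;
	}
	if (count & 2) {			/* testb $2 ... stosw */
		memcpy(p, &c, 2);
		p += 2;
	}
	if (count & 1)				/* testb $1 ... stosb */
		*p = (uint8_t)c;
	return s;
}

int main(void)
{
	char buf[11];

	constant_c_memset(buf, 0x41414141u, sizeof(buf)); /* 'A' in all lanes */
	fwrite(buf, 1, sizeof(buf), stdout);	/* prints AAAAAAAAAAA */
	putchar('\n');
	return 0;
}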
@@ -247,72 +217,6 @@ extern size_t strnlen(const char *s, size_t count);
 #define __HAVE_ARCH_STRSTR
 extern char *strstr(const char *cs, const char *ct);
 
-/*
- * This looks horribly ugly, but the compiler can optimize it totally,
- * as we by now know that both pattern and count is constant..
- */
-static __always_inline
-void *__constant_c_and_count_memset(void *s, unsigned long pattern,
-				    size_t count)
-{
-	switch (count) {
-	case 0:
-		return s;
-	case 1:
-		*(unsigned char *)s = pattern & 0xff;
-		return s;
-	case 2:
-		*(unsigned short *)s = pattern & 0xffff;
-		return s;
-	case 3:
-		*(unsigned short *)s = pattern & 0xffff;
-		*((unsigned char *)s + 2) = pattern & 0xff;
-		return s;
-	case 4:
-		*(unsigned long *)s = pattern;
-		return s;
-	}
-
-#define COMMON(x) \
-	asm volatile("rep ; stosl" \
-		     x \
-		     : "=&c" (d0), "=&D" (d1) \
-		     : "a" (eax), "0" (count/4), "1" ((long)s) \
-		     : "memory")
-
-	{
-		int d0, d1;
-#if __GNUC__ == 4 && __GNUC_MINOR__ == 0
-		/* Workaround for broken gcc 4.0 */
-		register unsigned long eax asm("%eax") = pattern;
-#else
-		unsigned long eax = pattern;
-#endif
-
-		switch (count % 4) {
-		case 0:
-			COMMON("");
-			return s;
-		case 1:
-			COMMON("\n\tstosb");
-			return s;
-		case 2:
-			COMMON("\n\tstosw");
-			return s;
-		default:
-			COMMON("\n\tstosw\n\tstosb");
-			return s;
-		}
-	}
-
-#undef COMMON
-}
-
-#define __constant_c_x_memset(s, c, count) \
-	(__builtin_constant_p(count) \
-	 ? __constant_c_and_count_memset((s), (c), (count)) \
-	 : __constant_c_memset((s), (c), (count)))
-
 #define __memset(s, c, count) \
 	(__builtin_constant_p(count) \
 	 ? __constant_count_memset((s), (c), (count)) \
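The deleted switch hand-specialized fills whose size is a compile-time constant; modern GCC's __builtin_memset performs the same specialization internally, so only the __builtin_constant_p dispatch idiom is worth illustrating. A sketch with made-up names (memset_demo, memset_rt), not kernel code:

/* demo.c — __builtin_constant_p(n) folds at compile time, so only one
 * branch of the ternary survives in the generated code. */
#include <stdio.h>
#include <string.h>

static void *memset_rt(void *s, int c, size_t n)	/* runtime path */
{
	return memset(s, c, n);
}

#define memset_demo(s, c, n)				\
	(__builtin_constant_p(n)			\
	 ? __builtin_memset((s), (c), (n))		\
	 : memset_rt((s), (c), (n)))

int main(void)
{
	char a[4], b[8];
	volatile size_t n = sizeof(b);	/* opaque to the optimizer */

	memset_demo(a, 'x', sizeof(a));	/* constant size: builtin branch */
	memset_demo(b, 'y', n);		/* variable size: runtime branch */
	printf("%c %c\n", a[0], b[0]);	/* prints: x y */
	return 0;
}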
@@ -321,15 +225,7 @@ void *__constant_c_and_count_memset(void *s, unsigned long pattern,
 #define __HAVE_ARCH_MEMSET
 extern void *memset(void *, int, size_t);
 #ifndef CONFIG_FORTIFY_SOURCE
-#if (__GNUC__ >= 4)
 #define memset(s, c, count) __builtin_memset(s, c, count)
-#else
-#define memset(s, c, count) \
-	(__builtin_constant_p(c) \
-	 ? __constant_c_x_memset((s), (0x01010101UL * (unsigned char)(c)), \
-				 (count)) \
-	 : __memset((s), (c), (count)))
-#endif
 #endif /* !CONFIG_FORTIFY_SOURCE */
 
 #define __HAVE_ARCH_MEMSET16
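The deleted fallback multiplied the fill byte by 0x01010101UL to replicate it across all four byte lanes of a word before the word-at-a-time fill. A tiny demonstration of the trick, not kernel code:

/* demo.c — byte replication by multiplication: each 1 in 0x01010101
 * places one copy of the byte. */
#include <stdio.h>

int main(void)
{
	unsigned char c = 0xab;
	unsigned long pattern = 0x01010101UL * c;

	printf("%#lx\n", pattern);	/* prints 0xabababab */
	return 0;
}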
diff --git a/arch/x86/include/asm/string_64.h b/arch/x86/include/asm/string_64.h
index 4e4194e21a09..75314c3dbe47 100644
--- a/arch/x86/include/asm/string_64.h
+++ b/arch/x86/include/asm/string_64.h
@@ -14,21 +14,6 @@
 extern void *memcpy(void *to, const void *from, size_t len);
 extern void *__memcpy(void *to, const void *from, size_t len);
 
-#ifndef CONFIG_FORTIFY_SOURCE
-#if (__GNUC__ == 4 && __GNUC_MINOR__ < 3) || __GNUC__ < 4
-#define memcpy(dst, src, len) \
-({ \
-	size_t __len = (len); \
-	void *__ret; \
-	if (__builtin_constant_p(len) && __len >= 64) \
-		__ret = __memcpy((dst), (src), __len); \
-	else \
-		__ret = __builtin_memcpy((dst), (src), __len); \
-	__ret; \
-})
-#endif
-#endif /* !CONFIG_FORTIFY_SOURCE */
-
 #define __HAVE_ARCH_MEMSET
 void *memset(void *s, int c, size_t n);
 void *__memset(void *s, int c, size_t n);