Diffstat (limited to 'arch/x86/include/asm/string_32.h')
-rw-r--r--  arch/x86/include/asm/string_32.h | 104 ----------------------------
1 file changed, 0 insertions(+), 104 deletions(-)
diff --git a/arch/x86/include/asm/string_32.h b/arch/x86/include/asm/string_32.h
index 55d392c6bd29..f74362b05619 100644
--- a/arch/x86/include/asm/string_32.h
+++ b/arch/x86/include/asm/string_32.h
@@ -179,14 +179,7 @@ static inline void *__memcpy3d(void *to, const void *from, size_t len)
  * No 3D Now!
  */
 
-#if (__GNUC__ >= 4)
 #define memcpy(t, f, n) __builtin_memcpy(t, f, n)
-#else
-#define memcpy(t, f, n) \
-	(__builtin_constant_p((n)) \
-	 ? __constant_memcpy((t), (f), (n)) \
-	 : __memcpy((t), (f), (n)))
-#endif
 
 #endif
 #endif /* !CONFIG_FORTIFY_SOURCE */
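With the pre-GCC-4 fallback gone, memcpy() maps unconditionally to __builtin_memcpy(), so the compiler can expand small constant-size copies inline instead of calling out of line. A minimal user-space sketch of what that buys for a constant sizeof() length (not part of the patch; struct point and copy_point are made-up names):

/* Minimal sketch (not from the patch): with a compile-time constant
 * length, GCC may expand __builtin_memcpy() into a few inline moves
 * instead of calling the out-of-line memcpy(). */
#include <stdio.h>

struct point { int x, y; };	/* hypothetical example type */

static void copy_point(struct point *dst, const struct point *src)
{
	/* sizeof(*dst) is a constant, so this typically becomes one or
	 * two register moves on x86 rather than a function call */
	__builtin_memcpy(dst, src, sizeof(*dst));
}

int main(void)
{
	struct point a = { 1, 2 }, b = { 0, 0 };

	copy_point(&b, &a);
	printf("%d %d\n", b.x, b.y);
	return 0;
}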
@@ -216,29 +209,6 @@ static inline void *__memset_generic(void *s, char c, size_t count)
 /* we might want to write optimized versions of these later */
 #define __constant_count_memset(s, c, count) __memset_generic((s), (c), (count))
 
-/*
- * memset(x, 0, y) is a reasonably common thing to do, so we want to fill
- * things 32 bits at a time even when we don't know the size of the
- * area at compile-time..
- */
-static __always_inline
-void *__constant_c_memset(void *s, unsigned long c, size_t count)
-{
-	int d0, d1;
-	asm volatile("rep ; stosl\n\t"
-		     "testb $2,%b3\n\t"
-		     "je 1f\n\t"
-		     "stosw\n"
-		     "1:\ttestb $1,%b3\n\t"
-		     "je 2f\n\t"
-		     "stosb\n"
-		     "2:"
-		     : "=&c" (d0), "=&D" (d1)
-		     : "a" (c), "q" (count), "0" (count/4), "1" ((long)s)
-		     : "memory");
-	return s;
-}
-
 /* Added by Gertjan van Wingerde to make minix and sysv module work */
 #define __HAVE_ARCH_STRNLEN
 extern size_t strnlen(const char *s, size_t count);
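The removed __constant_c_memset() fills count/4 whole 32-bit words with rep stosl, then uses bit 1 and bit 0 of count to decide whether a trailing stosw and/or stosb is needed. A plain-C sketch of the same store pattern (illustration only; it assumes, as the old memset() macro guaranteed, that c is a single byte replicated across the word):

/* Plain-C sketch of what the removed asm does; constant_c_memset_sketch
 * is a made-up name for illustration. */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

static void *constant_c_memset_sketch(void *s, unsigned long c, size_t count)
{
	unsigned char *p = s;
	size_t i;

	/* "rep ; stosl": store count/4 whole 32-bit words */
	for (i = 0; i < count / 4; i++) {
		uint32_t word = (uint32_t)c;
		memcpy(p, &word, 4);	/* byte order matches stosl on x86 */
		p += 4;
	}
	/* "testb $2,%b3; stosw": one 16-bit store if bit 1 of count is set */
	if (count & 2) {
		uint16_t half = (uint16_t)c;
		memcpy(p, &half, 2);
		p += 2;
	}
	/* "testb $1,%b3; stosb": one byte store if bit 0 of count is set */
	if (count & 1)
		*p = (unsigned char)c;
	return s;
}

int main(void)
{
	unsigned char buf[11];

	constant_c_memset_sketch(buf, 0x41414141UL, sizeof(buf));
	printf("%.11s\n", buf);	/* prints eleven 'A's */
	return 0;
}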
@@ -247,72 +217,6 @@ extern size_t strnlen(const char *s, size_t count);
 #define __HAVE_ARCH_STRSTR
 extern char *strstr(const char *cs, const char *ct);
 
-/*
- * This looks horribly ugly, but the compiler can optimize it totally,
- * as we by now know that both pattern and count is constant..
- */
-static __always_inline
-void *__constant_c_and_count_memset(void *s, unsigned long pattern,
-				    size_t count)
-{
-	switch (count) {
-	case 0:
-		return s;
-	case 1:
-		*(unsigned char *)s = pattern & 0xff;
-		return s;
-	case 2:
-		*(unsigned short *)s = pattern & 0xffff;
-		return s;
-	case 3:
-		*(unsigned short *)s = pattern & 0xffff;
-		*((unsigned char *)s + 2) = pattern & 0xff;
-		return s;
-	case 4:
-		*(unsigned long *)s = pattern;
-		return s;
-	}
-
-#define COMMON(x)							\
-	asm volatile("rep ; stosl"					\
-		     x							\
-		     : "=&c" (d0), "=&D" (d1)				\
-		     : "a" (eax), "0" (count/4), "1" ((long)s)		\
-		     : "memory")
-
-	{
-		int d0, d1;
-#if __GNUC__ == 4 && __GNUC_MINOR__ == 0
-		/* Workaround for broken gcc 4.0 */
-		register unsigned long eax asm("%eax") = pattern;
-#else
-		unsigned long eax = pattern;
-#endif
-
-		switch (count % 4) {
-		case 0:
-			COMMON("");
-			return s;
-		case 1:
-			COMMON("\n\tstosb");
-			return s;
-		case 2:
-			COMMON("\n\tstosw");
-			return s;
-		default:
-			COMMON("\n\tstosw\n\tstosb");
-			return s;
-		}
-	}
-
-#undef COMMON
-}
-
-#define __constant_c_x_memset(s, c, count)			\
-	(__builtin_constant_p(count)				\
-	 ? __constant_c_and_count_memset((s), (c), (count))	\
-	 : __constant_c_memset((s), (c), (count)))
-
 #define __memset(s, c, count)				\
 	(__builtin_constant_p(count)			\
 	 ? __constant_count_memset((s), (c), (count))	\
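The removed __constant_c_and_count_memset() leans on the count being a compile-time constant: the switch collapses to a handful of direct stores for counts up to 4, and for larger counts the count/4 and count%4 split is resolved at compile time so only one COMMON() expansion survives. A rough user-space sketch of that constant-count specialization idea (not the kernel code; small_pattern_memset is a made-up name, and the default case simply falls back to memset()):

/* Sketch of the constant-count idea: when 'count' is a compile-time
 * constant, the switch folds away and only the matching stores remain.
 * Assumes 'pattern' is a replicated fill byte, as the old macro ensured. */
#include <stddef.h>
#include <stdint.h>
#include <string.h>

static inline void *small_pattern_memset(void *s, unsigned long pattern,
					 size_t count)
{
	switch (count) {
	case 0:
		return s;
	case 1:
		*(unsigned char *)s = pattern & 0xff;
		return s;
	case 2: {
		uint16_t v = pattern & 0xffff;
		memcpy(s, &v, 2);	/* avoids unaligned-store UB */
		return s;
	}
	case 3: {
		uint16_t v = pattern & 0xffff;
		memcpy(s, &v, 2);
		*((unsigned char *)s + 2) = pattern & 0xff;
		return s;
	}
	case 4: {
		uint32_t v = (uint32_t)pattern;
		memcpy(s, &v, 4);
		return s;
	}
	default:
		/* larger or non-constant counts: plain byte fill */
		return memset(s, (int)(pattern & 0xff), count);
	}
}

int main(void)
{
	unsigned char buf[4];

	small_pattern_memset(buf, 0x2a2a2a2aUL, sizeof(buf));
	return buf[0] == 0x2a ? 0 : 1;
}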
@@ -321,15 +225,7 @@ void *__constant_c_and_count_memset(void *s, unsigned long pattern,
 #define __HAVE_ARCH_MEMSET
 extern void *memset(void *, int, size_t);
 #ifndef CONFIG_FORTIFY_SOURCE
-#if (__GNUC__ >= 4)
 #define memset(s, c, count) __builtin_memset(s, c, count)
-#else
-#define memset(s, c, count) \
-	(__builtin_constant_p(c) \
-	 ? __constant_c_x_memset((s), (0x01010101UL * (unsigned char)(c)), \
-				 (count)) \
-	 : __memset((s), (c), (count)))
-#endif
 #endif /* !CONFIG_FORTIFY_SOURCE */
 
 #define __HAVE_ARCH_MEMSET16
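The removed pre-GCC-4 memset() macro built its 32-bit fill pattern by multiplying the fill byte by 0x01010101UL, which replicates the byte into all four byte lanes of the word. A tiny user-space check of that arithmetic (illustration only):

/* Byte-replication trick used by the removed memset() macro:
 * multiplying a byte by 0x01010101 copies it into every byte of
 * a 32-bit word, e.g. 0xab -> 0xabababab. */
#include <stdio.h>

int main(void)
{
	unsigned char c = 0xab;
	unsigned long pattern = 0x01010101UL * c;

	printf("0x%08lx\n", pattern);	/* prints 0xabababab */
	return 0;
}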