| author | Linus Torvalds <torvalds@ppc970.osdl.org> | 2005-04-16 18:20:36 -0400 |
|---|---|---|
| committer | Linus Torvalds <torvalds@ppc970.osdl.org> | 2005-04-16 18:20:36 -0400 |
| commit | 1da177e4c3f41524e886b7f1b8a0c1fc7321cac2 (patch) | |
| tree | 0bba044c4ce775e45a88a51686b5d9f90697ea9d /include/asm-i386/string.h | |
| tag | Linux-2.6.12-rc2, v2.6.12-rc2 | |
Initial git repository build. I'm not bothering with the full history,
even though we have it. We can create a separate "historical" git
archive of that later if we want to, and in the meantime it's about
3.2GB when imported into git - space that would just make the early
git days unnecessarily complicated, when we don't have a lot of good
infrastructure for it.
Let it rip!
Diffstat (limited to 'include/asm-i386/string.h')
-rw-r--r-- include/asm-i386/string.h | 449
1 file changed, 449 insertions, 0 deletions
diff --git a/include/asm-i386/string.h b/include/asm-i386/string.h
new file mode 100644
index 000000000000..1679983d053f
--- /dev/null
+++ b/include/asm-i386/string.h
@@ -0,0 +1,449 @@
#ifndef _I386_STRING_H_
#define _I386_STRING_H_

#ifdef __KERNEL__
#include <linux/config.h>
/*
 * On a 486 or Pentium, we are better off not using the
 * byte string operations. But on a 386 or a PPro the
 * byte string ops are faster than doing it by hand
 * (MUCH faster on a Pentium).
 */

/*
 * This string-include defines all string functions as inline
 * functions. Use gcc. It also assumes ds=es=data space, this should be
 * normal. Most of the string-functions are rather heavily hand-optimized,
 * see especially strsep,strstr,str[c]spn. They should work, but are not
 * very easy to understand. Everything is done entirely within the register
 * set, making the functions fast and clean. String instructions have been
 * used through-out, making for "slightly" unclear code :-)
 *
 *	NO Copyright (C) 1991, 1992 Linus Torvalds,
 *	consider these trivial functions to be PD.
 */

/* AK: in fact I bet it would be better to move this stuff all out of line.
 */
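Each `__HAVE_ARCH_*` define below tells the generic string library that this architecture supplies its own inline version. As a rough sketch of that override pattern (paraphrasing how `lib/string.c` is structured, not code from this commit), the portable fallback is compiled only when the arch header has not claimed the function:

```c
/* Sketch of the lib/string.c override pattern (illustrative only). */
#ifndef __HAVE_ARCH_STRCPY
char *strcpy(char *dest, const char *src)
{
	char *tmp = dest;

	while ((*dest++ = *src++) != '\0')
		/* nothing */;
	return tmp;
}
#endif
```

Because this header defines `__HAVE_ARCH_STRCPY` (and friends), the generic copies are skipped and callers get the inline assembly versions instead.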

#define __HAVE_ARCH_STRCPY
static inline char * strcpy(char * dest,const char *src)
{
	int d0, d1, d2;
	__asm__ __volatile__(
		"1:\tlodsb\n\t"
		"stosb\n\t"
		"testb %%al,%%al\n\t"
		"jne 1b"
		: "=&S" (d0), "=&D" (d1), "=&a" (d2)
		:"0" (src),"1" (dest) : "memory");
	return dest;
}
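Read top to bottom, the `lodsb`/`stosb` loop is the string-instruction form of the usual byte-copy loop: the constraints load ESI with `src`, EDI with `dest`, and each iteration moves one byte through AL until the terminator has been stored. A plain-C equivalent, shown only as an illustration and not part of the header:

```c
/* Illustrative C equivalent of the strcpy() asm above. */
static inline char *strcpy_sketch(char *dest, const char *src)
{
	char *d = dest;

	do {
		*d = *src++;		/* lodsb + stosb */
	} while (*d++ != '\0');		/* testb %al,%al ; jne 1b */
	return dest;
}
```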

#define __HAVE_ARCH_STRNCPY
static inline char * strncpy(char * dest,const char *src,size_t count)
{
	int d0, d1, d2, d3;
	__asm__ __volatile__(
		"1:\tdecl %2\n\t"
		"js 2f\n\t"
		"lodsb\n\t"
		"stosb\n\t"
		"testb %%al,%%al\n\t"
		"jne 1b\n\t"
		"rep\n\t"
		"stosb\n"
		"2:"
		: "=&S" (d0), "=&D" (d1), "=&c" (d2), "=&a" (d3)
		:"0" (src),"1" (dest),"2" (count) : "memory");
	return dest;
}
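One detail worth calling out: once the terminating NUL has been copied, AL is zero and the leftover count is burned off with `rep stosb`, so the tail of the destination is zero-filled just as ISO C strncpy requires. A hypothetical caller (`buf` and the wrapper are made up for illustration):

```c
/* Hypothetical usage showing the rep-stosb zero padding of strncpy() above. */
static inline void strncpy_pad_example(void)
{
	char buf[8];

	strncpy(buf, "hi", sizeof(buf));	/* buf = 'h','i',0,0,0,0,0,0 */
}
```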

#define __HAVE_ARCH_STRCAT
static inline char * strcat(char * dest,const char * src)
{
	int d0, d1, d2, d3;
	__asm__ __volatile__(
		"repne\n\t"
		"scasb\n\t"
		"decl %1\n"
		"1:\tlodsb\n\t"
		"stosb\n\t"
		"testb %%al,%%al\n\t"
		"jne 1b"
		: "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
		: "0" (src), "1" (dest), "2" (0), "3" (0xffffffffu):"memory");
	return dest;
}

#define __HAVE_ARCH_STRNCAT
static inline char * strncat(char * dest,const char * src,size_t count)
{
	int d0, d1, d2, d3;
	__asm__ __volatile__(
		"repne\n\t"
		"scasb\n\t"
		"decl %1\n\t"
		"movl %8,%3\n"
		"1:\tdecl %3\n\t"
		"js 2f\n\t"
		"lodsb\n\t"
		"stosb\n\t"
		"testb %%al,%%al\n\t"
		"jne 1b\n"
		"2:\txorl %2,%2\n\t"
		"stosb"
		: "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
		: "0" (src),"1" (dest),"2" (0),"3" (0xffffffffu), "g" (count)
		: "memory");
	return dest;
}

#define __HAVE_ARCH_STRCMP
static inline int strcmp(const char * cs,const char * ct)
{
	int d0, d1;
	register int __res;
	__asm__ __volatile__(
		"1:\tlodsb\n\t"
		"scasb\n\t"
		"jne 2f\n\t"
		"testb %%al,%%al\n\t"
		"jne 1b\n\t"
		"xorl %%eax,%%eax\n\t"
		"jmp 3f\n"
		"2:\tsbbl %%eax,%%eax\n\t"
		"orb $1,%%al\n"
		"3:"
		:"=a" (__res), "=&S" (d0), "=&D" (d1)
		:"1" (cs),"2" (ct));
	return __res;
}
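The `sbbl %eax,%eax` / `orb $1,%al` tail of strcmp() is the part most readers trip over. `scasb` computes `*cs - *ct` only for its flags, so after a mismatch the carry flag is set exactly when the `cs` byte is (unsigned) smaller; equal strings take the separate `xorl` path and return 0. A small C rendering of the mismatch tail, purely for illustration:

```c
/* How the sbb/or tail of strcmp() above turns the scasb flags into -1 or +1. */
static inline int strcmp_tail_sketch(unsigned char cs_byte, unsigned char ct_byte)
{
	int carry = (cs_byte < ct_byte);	/* CF after scasb */
	int res = -carry;			/* sbbl %eax,%eax: 0 or -1 */

	return res | 1;				/* orb $1,%al: 0 -> 1, -1 stays -1 */
}
```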

#define __HAVE_ARCH_STRNCMP
static inline int strncmp(const char * cs,const char * ct,size_t count)
{
	register int __res;
	int d0, d1, d2;
	__asm__ __volatile__(
		"1:\tdecl %3\n\t"
		"js 2f\n\t"
		"lodsb\n\t"
		"scasb\n\t"
		"jne 3f\n\t"
		"testb %%al,%%al\n\t"
		"jne 1b\n"
		"2:\txorl %%eax,%%eax\n\t"
		"jmp 4f\n"
		"3:\tsbbl %%eax,%%eax\n\t"
		"orb $1,%%al\n"
		"4:"
		:"=a" (__res), "=&S" (d0), "=&D" (d1), "=&c" (d2)
		:"1" (cs),"2" (ct),"3" (count));
	return __res;
}

#define __HAVE_ARCH_STRCHR
static inline char * strchr(const char * s, int c)
{
	int d0;
	register char * __res;
	__asm__ __volatile__(
		"movb %%al,%%ah\n"
		"1:\tlodsb\n\t"
		"cmpb %%ah,%%al\n\t"
		"je 2f\n\t"
		"testb %%al,%%al\n\t"
		"jne 1b\n\t"
		"movl $1,%1\n"
		"2:\tmovl %1,%0\n\t"
		"decl %0"
		:"=a" (__res), "=&S" (d0) : "1" (s),"0" (c));
	return __res;
}
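strchr() hides its NULL return in the `movl $1,%1` / `decl %0` pair: if the scan falls off the end of the string, ESI is forced to 1 so the final decrement yields 0; otherwise ESI points one byte past the match and the decrement backs up onto it. Like the C library function, it also matches the terminator when `c == '\0'`. An illustrative plain-C version, not part of the header:

```c
#include <stddef.h>

/* Illustrative C equivalent of the strchr() asm above. */
static inline char *strchr_sketch(const char *s, int c)
{
	while (*s != (char)c) {
		if (*s == '\0')		/* end of string: the asm returns NULL here */
			return NULL;
		s++;
	}
	return (char *)s;		/* also reached when c == '\0' */
}
```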

#define __HAVE_ARCH_STRRCHR
static inline char * strrchr(const char * s, int c)
{
	int d0, d1;
	register char * __res;
	__asm__ __volatile__(
		"movb %%al,%%ah\n"
		"1:\tlodsb\n\t"
		"cmpb %%ah,%%al\n\t"
		"jne 2f\n\t"
		"leal -1(%%esi),%0\n"
		"2:\ttestb %%al,%%al\n\t"
		"jne 1b"
		:"=g" (__res), "=&S" (d0), "=&a" (d1) :"0" (0),"1" (s),"2" (c));
	return __res;
}

#define __HAVE_ARCH_STRLEN
static inline size_t strlen(const char * s)
{
	int d0;
	register int __res;
	__asm__ __volatile__(
		"repne\n\t"
		"scasb\n\t"
		"notl %0\n\t"
		"decl %0"
		:"=c" (__res), "=&D" (d0) :"1" (s),"a" (0), "0" (0xffffffffu));
	return __res;
}
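The `notl`/`decl` pair in strlen() recovers the length from the leftover count: ECX starts at 0xffffffff and `repne scasb` decrements it once per byte examined, including the NUL. The arithmetic, written out in C for illustration (for `"abc"`, four bytes are scanned and the result is 3):

```c
/* The notl/decl arithmetic from strlen() above, spelled out. */
static inline unsigned int strlen_from_ecx_sketch(unsigned int bytes_scanned)
{
	unsigned int ecx = 0xffffffffu - bytes_scanned;	/* ECX after repne scasb */

	return ~ecx - 1;	/* notl %ecx ; decl %ecx  ==  bytes_scanned - 1 */
}
/* strlen_from_ecx_sketch(4) == 3, matching strlen("abc"). */
```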

static inline void * __memcpy(void * to, const void * from, size_t n)
{
	int d0, d1, d2;
	__asm__ __volatile__(
		"rep ; movsl\n\t"
		"testb $2,%b4\n\t"
		"je 1f\n\t"
		"movsw\n"
		"1:\ttestb $1,%b4\n\t"
		"je 2f\n\t"
		"movsb\n"
		"2:"
		: "=&c" (d0), "=&D" (d1), "=&S" (d2)
		:"0" (n/4), "q" (n),"1" ((long) to),"2" ((long) from)
		: "memory");
	return (to);
}
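The copy is split into `n/4` dword moves plus a tail selected by the low two bits of `n`: bit 1 triggers a `movsw`, bit 0 a `movsb`. For example, `n = 15` becomes three `movsl`, one `movsw` and one `movsb`. A small sketch of that decomposition, for illustration only:

```c
/* How __memcpy() above decomposes a length into dword, word and byte moves. */
struct memcpy_plan_sketch {
	unsigned long dwords;	/* rep movsl count (n / 4)      */
	int do_movsw;		/* testb $2,%b4: copy two bytes */
	int do_movsb;		/* testb $1,%b4: copy one byte  */
};

static inline struct memcpy_plan_sketch memcpy_plan_sketch(unsigned long n)
{
	struct memcpy_plan_sketch p = { n / 4, (n & 2) != 0, (n & 1) != 0 };
	return p;
}
/* memcpy_plan_sketch(15) == { 3, 1, 1 }: 3*4 + 2 + 1 = 15 bytes. */
```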

/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static inline void * __constant_memcpy(void * to, const void * from, size_t n)
{
	if (n <= 128)
		return __builtin_memcpy(to, from, n);

#define COMMON(x) \
__asm__ __volatile__( \
	"rep ; movsl" \
	x \
	: "=&c" (d0), "=&D" (d1), "=&S" (d2) \
	: "0" (n/4),"1" ((long) to),"2" ((long) from) \
	: "memory");
	{
		int d0, d1, d2;
		switch (n % 4) {
		case 0: COMMON(""); return to;
		case 1: COMMON("\n\tmovsb"); return to;
		case 2: COMMON("\n\tmovsw"); return to;
		default: COMMON("\n\tmovsw\n\tmovsb"); return to;
		}
	}

#undef COMMON
}

#define __HAVE_ARCH_MEMCPY

#ifdef CONFIG_X86_USE_3DNOW

#include <asm/mmx.h>

/*
 * This CPU favours 3DNow strongly (eg AMD Athlon)
 */

static inline void * __constant_memcpy3d(void * to, const void * from, size_t len)
{
	if (len < 512)
		return __constant_memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

static __inline__ void *__memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy3d((t),(f),(n)) : \
 __memcpy3d((t),(f),(n)))

#else

/*
 * No 3D Now!
 */

#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy((t),(f),(n)) : \
 __memcpy((t),(f),(n)))

#endif
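Either way, `memcpy` ends up as a macro that dispatches on `__builtin_constant_p(n)`: a compile-time-constant size goes through `__constant_memcpy` (or `__constant_memcpy3d`), anything else through `__memcpy`/`__memcpy3d`. A hypothetical caller, just to show the two expansions (the struct and function names are made up):

```c
/* Hypothetical caller illustrating the constant/variable split of memcpy above. */
struct frame_sketch {
	char header[16];
	char payload[1500];
};

static inline void copy_frame_sketch(struct frame_sketch *dst,
				     const struct frame_sketch *src,
				     unsigned long payload_len)
{
	/* sizeof() is a compile-time constant: expands to the __constant_memcpy* path. */
	memcpy(dst->header, src->header, sizeof(src->header));

	/* Runtime length: expands to __memcpy / __memcpy3d. */
	memcpy(dst->payload, src->payload, payload_len);
}
```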

#define __HAVE_ARCH_MEMMOVE
void *memmove(void * dest,const void * src, size_t n);

#define memcmp __builtin_memcmp

#define __HAVE_ARCH_MEMCHR
static inline void * memchr(const void * cs,int c,size_t count)
{
	int d0;
	register void * __res;
	if (!count)
		return NULL;
	__asm__ __volatile__(
		"repne\n\t"
		"scasb\n\t"
		"je 1f\n\t"
		"movl $1,%0\n"
		"1:\tdecl %0"
		:"=D" (__res), "=&c" (d0) : "a" (c),"0" (cs),"1" (count));
	return __res;
}

static inline void * __memset_generic(void * s, char c,size_t count)
{
	int d0, d1;
	__asm__ __volatile__(
		"rep\n\t"
		"stosb"
		: "=&c" (d0), "=&D" (d1)
		:"a" (c),"1" (s),"0" (count)
		:"memory");
	return s;
}

/* we might want to write optimized versions of these later */
#define __constant_count_memset(s,c,count) __memset_generic((s),(c),(count))

/*
 * memset(x,0,y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time..
 */
static inline void * __constant_c_memset(void * s, unsigned long c, size_t count)
{
	int d0, d1;
	__asm__ __volatile__(
		"rep ; stosl\n\t"
		"testb $2,%b3\n\t"
		"je 1f\n\t"
		"stosw\n"
		"1:\ttestb $1,%b3\n\t"
		"je 2f\n\t"
		"stosb\n"
		"2:"
		: "=&c" (d0), "=&D" (d1)
		:"a" (c), "q" (count), "0" (count/4), "1" ((long) s)
		:"memory");
	return (s);
}

/* Added by Gertjan van Wingerde to make minix and sysv module work */
#define __HAVE_ARCH_STRNLEN
static inline size_t strnlen(const char * s, size_t count)
{
	int d0;
	register int __res;
	__asm__ __volatile__(
		"movl %2,%0\n\t"
		"jmp 2f\n"
		"1:\tcmpb $0,(%0)\n\t"
		"je 3f\n\t"
		"incl %0\n"
		"2:\tdecl %1\n\t"
		"cmpl $-1,%1\n\t"
		"jne 1b\n"
		"3:\tsubl %2,%0"
		:"=a" (__res), "=&d" (d0)
		:"c" (s),"1" (count));
	return __res;
}
/* end of additional stuff */

#define __HAVE_ARCH_STRSTR

extern char *strstr(const char *cs, const char *ct);

/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as we by now know that both pattern and count is constant..
 */
static inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
{
	switch (count) {
	case 0:
		return s;
	case 1:
		*(unsigned char *)s = pattern;
		return s;
	case 2:
		*(unsigned short *)s = pattern;
		return s;
	case 3:
		*(unsigned short *)s = pattern;
		*(2+(unsigned char *)s) = pattern;
		return s;
	case 4:
		*(unsigned long *)s = pattern;
		return s;
	}
#define COMMON(x) \
__asm__ __volatile__( \
	"rep ; stosl" \
	x \
	: "=&c" (d0), "=&D" (d1) \
	: "a" (pattern),"0" (count/4),"1" ((long) s) \
	: "memory")
	{
		int d0, d1;
		switch (count % 4) {
		case 0: COMMON(""); return s;
		case 1: COMMON("\n\tstosb"); return s;
		case 2: COMMON("\n\tstosw"); return s;
		default: COMMON("\n\tstosw\n\tstosb"); return s;
		}
	}

#undef COMMON
}

#define __constant_c_x_memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_c_and_count_memset((s),(c),(count)) : \
 __constant_c_memset((s),(c),(count)))

#define __memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_count_memset((s),(c),(count)) : \
 __memset_generic((s),(c),(count)))

#define __HAVE_ARCH_MEMSET
#define memset(s, c, count) \
(__builtin_constant_p(c) ? \
 __constant_c_x_memset((s),(0x01010101UL*(unsigned char)(c)),(count)) : \
 __memset((s),(c),(count)))

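When the fill value is a compile-time constant, the `0x01010101UL*(unsigned char)(c)` multiply replicates that byte into all four lanes of a 32-bit word, so the dword-at-a-time `rep stosl` paths above can be used; for instance `memset(p, 0xab, n)` stores `0xabababab` words. The arithmetic, spelled out as a sketch:

```c
/* The byte-replication trick from the memset() macro above. */
static inline unsigned long memset_pattern_sketch(unsigned char c)
{
	return 0x01010101UL * c;	/* 0xab -> 0xababababUL, 0x00 -> 0x0UL */
}
```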
/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
static inline void * memscan(void * addr, int c, size_t size)
{
	if (!size)
		return addr;
	__asm__("repnz; scasb\n\t"
		"jnz 1f\n\t"
		"dec %%edi\n"
		"1:"
		: "=D" (addr), "=c" (size)
		: "0" (addr), "1" (size), "a" (c));
	return addr;
}
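Unlike memchr(), memscan() never returns NULL: as the comment says, a miss returns a pointer one past the scanned area, which makes it convenient for computing offsets or resuming a scan. A hypothetical caller (the names are made up for illustration):

```c
/* Hypothetical usage of memscan() above: find the first 0xff byte, or stop at the end. */
static inline unsigned long count_before_ff_sketch(void *buf, unsigned long len)
{
	char *hit = memscan(buf, 0xff, len);

	return hit - (char *)buf;	/* equals len if no 0xff byte was found */
}
```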

#endif /* __KERNEL__ */

#endif