Diffstat (limited to 'include/asm-x86/string_32.h')
 -rw-r--r--  include/asm-x86/string_32.h  276
 1 file changed, 276 insertions, 0 deletions
diff --git a/include/asm-x86/string_32.h b/include/asm-x86/string_32.h
new file mode 100644
index 000000000000..a9b64453bdf5
--- /dev/null
+++ b/include/asm-x86/string_32.h
@@ -0,0 +1,276 @@
#ifndef _I386_STRING_H_
#define _I386_STRING_H_

#ifdef __KERNEL__

/* Let gcc decide whether to inline or use the out-of-line functions */

#define __HAVE_ARCH_STRCPY
extern char *strcpy(char *dest, const char *src);

#define __HAVE_ARCH_STRNCPY
extern char *strncpy(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCAT
extern char *strcat(char *dest, const char *src);

#define __HAVE_ARCH_STRNCAT
extern char *strncat(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCMP
extern int strcmp(const char *cs, const char *ct);

#define __HAVE_ARCH_STRNCMP
extern int strncmp(const char *cs, const char *ct, size_t count);

#define __HAVE_ARCH_STRCHR
extern char *strchr(const char *s, int c);

#define __HAVE_ARCH_STRRCHR
extern char *strrchr(const char *s, int c);

#define __HAVE_ARCH_STRLEN
extern size_t strlen(const char *s);

static __always_inline void *__memcpy(void *to, const void *from, size_t n)
{
        int d0, d1, d2;
        __asm__ __volatile__(
                "rep ; movsl\n\t"
                "movl %4,%%ecx\n\t"
                "andl $3,%%ecx\n\t"
                "jz 1f\n\t"
                "rep ; movsb\n\t"
                "1:"
                : "=&c" (d0), "=&D" (d1), "=&S" (d2)
                : "0" (n/4), "g" (n), "1" ((long) to), "2" ((long) from)
                : "memory");
        return to;
}
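
/*
 * Worked example: for n == 10, "0" (n/4) preloads ECX with 2, so
 * "rep ; movsl" copies two dwords (8 bytes); the tail then reloads n
 * into ECX, masks it down to n & 3 == 2, and "rep ; movsb" copies the
 * last two bytes.
 */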

/*
 * This looks ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static __always_inline void *__constant_memcpy(void *to, const void *from, size_t n)
{
        long esi, edi;
        if (!n)
                return to;
#if 1 /* want to do small copies with non-string ops? */
        switch (n) {
        case 1: *(char *)to = *(char *)from; return to;
        case 2: *(short *)to = *(short *)from; return to;
        case 4: *(int *)to = *(int *)from; return to;
#if 1 /* including those doable with two moves? */
        case 3: *(short *)to = *(short *)from;
                *((char *)to + 2) = *((char *)from + 2); return to;
        case 5: *(int *)to = *(int *)from;
                *((char *)to + 4) = *((char *)from + 4); return to;
        case 6: *(int *)to = *(int *)from;
                *((short *)to + 2) = *((short *)from + 2); return to;
        case 8: *(int *)to = *(int *)from;
                *((int *)to + 1) = *((int *)from + 1); return to;
#endif
        }
#endif
        esi = (long) from;
        edi = (long) to;
        if (n >= 5*4) {
                /* large block: use rep prefix */
                int ecx;
                __asm__ __volatile__(
                        "rep ; movsl"
                        : "=&c" (ecx), "=&D" (edi), "=&S" (esi)
                        : "0" (n/4), "1" (edi), "2" (esi)
                        : "memory"
                );
        } else {
                /* small block: don't clobber ecx + smaller code */
                if (n >= 4*4) __asm__ __volatile__("movsl"
                        : "=&D" (edi), "=&S" (esi) : "0" (edi), "1" (esi) : "memory");
                if (n >= 3*4) __asm__ __volatile__("movsl"
                        : "=&D" (edi), "=&S" (esi) : "0" (edi), "1" (esi) : "memory");
                if (n >= 2*4) __asm__ __volatile__("movsl"
                        : "=&D" (edi), "=&S" (esi) : "0" (edi), "1" (esi) : "memory");
                if (n >= 1*4) __asm__ __volatile__("movsl"
                        : "=&D" (edi), "=&S" (esi) : "0" (edi), "1" (esi) : "memory");
        }
        switch (n % 4) {
        /* tail */
        case 0: return to;
        case 1: __asm__ __volatile__("movsb"
                        : "=&D" (edi), "=&S" (esi) : "0" (edi), "1" (esi) : "memory");
                return to;
        case 2: __asm__ __volatile__("movsw"
                        : "=&D" (edi), "=&S" (esi) : "0" (edi), "1" (esi) : "memory");
                return to;
        default: __asm__ __volatile__("movsw\n\tmovsb"
                        : "=&D" (edi), "=&S" (esi) : "0" (edi), "1" (esi) : "memory");
                return to;
        }
}
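
/*
 * Worked example: a constant n == 7 never reaches the "rep" path
 * (7 < 5*4): the compiler keeps only the n >= 1*4 "movsl" and the
 * n % 4 == 3 tail, so the call collapses to movsl, movsw, movsb
 * with no loop at all.
 */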

#define __HAVE_ARCH_MEMCPY

#ifdef CONFIG_X86_USE_3DNOW

#include <asm/mmx.h>

/*
 * This CPU favours 3DNow strongly (eg AMD Athlon)
 */

static inline void *__constant_memcpy3d(void *to, const void *from, size_t len)
{
        if (len < 512)
                return __constant_memcpy(to, from, len);
        return _mmx_memcpy(to, from, len);
}

static inline void *__memcpy3d(void *to, const void *from, size_t len)
{
        if (len < 512)
                return __memcpy(to, from, len);
        return _mmx_memcpy(to, from, len);
}

#define memcpy(t, f, n)                         \
        (__builtin_constant_p(n) ?              \
         __constant_memcpy3d((t), (f), (n)) :   \
         __memcpy3d((t), (f), (n)))

#else

/*
 * No 3D Now!
 */

#define memcpy(t, f, n)                         \
        (__builtin_constant_p(n) ?              \
         __constant_memcpy((t), (f), (n)) :     \
         __memcpy((t), (f), (n)))

#endif
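
/*
 * Dispatch illustration (hypothetical caller; hdr, buf, dst, src and
 * runtime_len are made-up names):
 *
 *      memcpy(&hdr, buf, sizeof(hdr));    constant n: expands to the
 *                                         __constant_memcpy* variant
 *      memcpy(dst, src, runtime_len);     run-time n: expands to the
 *                                         __memcpy* variant
 */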

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t n);

#define memcmp __builtin_memcmp

#define __HAVE_ARCH_MEMCHR
extern void *memchr(const void *cs, int c, size_t count);

static inline void *__memset_generic(void *s, char c, size_t count)
{
        int d0, d1;
        __asm__ __volatile__(
                "rep\n\t"
                "stosb"
                : "=&c" (d0), "=&D" (d1)
                : "a" (c), "1" (s), "0" (count)
                : "memory");
        return s;
}
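
/*
 * "rep ; stosb" stores AL into [EDI] and decrements ECX until it hits
 * zero, so the constraints above just preload ECX with count, EDI with
 * s and AL with c; a byte-granular fill needs no tail handling.
 */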

/* we might want to write optimized versions of these later */
#define __constant_count_memset(s, c, count) __memset_generic((s), (c), (count))

/*
 * memset(x, 0, y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time..
 */
static __always_inline void *__constant_c_memset(void *s, unsigned long c, size_t count)
{
        int d0, d1;
        __asm__ __volatile__(
                "rep ; stosl\n\t"
                "testb $2,%b3\n\t"
                "je 1f\n\t"
                "stosw\n"
                "1:\ttestb $1,%b3\n\t"
                "je 2f\n\t"
                "stosb\n"
                "2:"
                : "=&c" (d0), "=&D" (d1)
                : "a" (c), "q" (count), "0" (count/4), "1" ((long) s)
                : "memory");
        return s;
}
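
/*
 * Worked example: count == 7 gives count/4 == 1, so "rep ; stosl"
 * stores one dword; bit 1 of count is set, so "stosw" adds two more
 * bytes, and bit 0 is set, so "stosb" stores the seventh.  Note that
 * c must arrive already replicated into all four bytes; the memset()
 * macro below arranges this with the 0x01010101UL multiply.
 */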

/* Added by Gertjan van Wingerde to make the minix and sysv modules work */
#define __HAVE_ARCH_STRNLEN
extern size_t strnlen(const char *s, size_t count);
/* end of additional stuff */

#define __HAVE_ARCH_STRSTR
extern char *strstr(const char *cs, const char *ct);

/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as we by now know that both pattern and count are constant..
 */
static __always_inline void *__constant_c_and_count_memset(void *s, unsigned long pattern, size_t count)
{
        switch (count) {
        case 0:
                return s;
        case 1:
                *(unsigned char *)s = pattern;
                return s;
        case 2:
                *(unsigned short *)s = pattern;
                return s;
        case 3:
                *(unsigned short *)s = pattern;
                *(2 + (unsigned char *)s) = pattern;
                return s;
        case 4:
                *(unsigned long *)s = pattern;
                return s;
        }
#define COMMON(x)                                               \
        __asm__ __volatile__(                                   \
                "rep ; stosl"                                   \
                x                                               \
                : "=&c" (d0), "=&D" (d1)                        \
                : "a" (pattern), "0" (count/4), "1" ((long) s)  \
                : "memory")
        {
                int d0, d1;
                switch (count % 4) {
                case 0: COMMON(""); return s;
                case 1: COMMON("\n\tstosb"); return s;
                case 2: COMMON("\n\tstosw"); return s;
                default: COMMON("\n\tstosw\n\tstosb"); return s;
                }
        }

#undef COMMON
}
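
/*
 * Worked example: count == 9 falls past the switch above, so COMMON
 * runs "rep ; stosl" for count/4 == 2 dwords and, because
 * count % 4 == 1, appends a single "stosb": nine bytes in total,
 * selected entirely at compile time.
 */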

#define __constant_c_x_memset(s, c, count)                      \
        (__builtin_constant_p(count) ?                          \
         __constant_c_and_count_memset((s), (c), (count)) :     \
         __constant_c_memset((s), (c), (count)))

#define __memset(s, c, count)                                   \
        (__builtin_constant_p(count) ?                          \
         __constant_count_memset((s), (c), (count)) :           \
         __memset_generic((s), (c), (count)))


#define __HAVE_ARCH_MEMSET
#define memset(s, c, count)                                     \
        (__builtin_constant_p(c) ?                              \
         __constant_c_x_memset((s), (0x01010101UL * (unsigned char)(c)), (count)) : \
         __memset((s), (c), (count)))
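
/*
 * Replication example: for a constant c == 0xAB, the multiply gives
 * 0xAB * 0x01010101UL == 0xABABABAB, so every byte of the dword
 * pattern holds c before the stosl-based helpers run; memset(p, 0, n)
 * likewise yields the all-zero pattern.
 */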

/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
extern void *memscan(void *addr, int c, size_t size);

#endif /* __KERNEL__ */

#endif