author      Roman Zippel <zippel@linux-m68k.org>        2006-06-25 08:46:53 -0400
committer   Linus Torvalds <torvalds@g5.osdl.org>       2006-06-25 13:00:56 -0400
commit      53617825ccf3ff8a71e6efcf3dcf58885ed6f3e5 (patch)
tree        b653ea903472f2653840a16d729f3b36bec6fd5e /include
parent      caad3c2a15dfa2e0da8cf51a57c052372123483c (diff)
[PATCH] m68k: fix uaccess.h for gcc-3.x
gcc-3.x has a few problems detecting a constant parameter.

Signed-off-by: Roman Zippel <zippel@linux-m68k.org>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
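Background: the inline copy routines removed by this patch pass the length to the assembler through an "i" (immediate) constraint and %c4, so they only assemble when gcc still sees n as a compile-time constant at that point; gcc-3.x is unreliable at detecting this, hence the rewrite into a plain switch on n that expands fixed-size asm helpers. As a rough sketch in C (the wrapper name below is hypothetical and only illustrates the intended split between the inline and out-of-line paths; it is not part of the patch):

/*
 * Hypothetical sketch, not from the patch: the usual uaccess.h-style
 * dispatch this header is assumed to provide.  Compile-time constant
 * sizes go to the inlined __constant_copy_from_user(), everything
 * else to the out-of-line __generic_copy_from_user().
 */
#define copy_from_user_sketch(to, from, n)                      \
        (__builtin_constant_p(n) ?                              \
         __constant_copy_from_user(to, from, n) :               \
         __generic_copy_from_user(to, from, n))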
Diffstat (limited to 'include')
-rw-r--r--  include/asm-m68k/uaccess.h  234
1 file changed, 127 insertions(+), 107 deletions(-)
diff --git a/include/asm-m68k/uaccess.h b/include/asm-m68k/uaccess.h
index b761ef218cea..88b1f47400e1 100644
--- a/include/asm-m68k/uaccess.h
+++ b/include/asm-m68k/uaccess.h
@@ -181,144 +181,164 @@ asm volatile ("\n" \
 unsigned long __generic_copy_from_user(void *to, const void __user *from, unsigned long n);
 unsigned long __generic_copy_to_user(void __user *to, const void *from, unsigned long n);
 
+#define __constant_copy_from_user_asm(res, to, from, tmp, n, s1, s2, s3)\
+        asm volatile ("\n" \
+                "1:     moves."#s1" (%2)+,%3\n" \
+                "       move."#s1" %3,(%1)+\n" \
+                "2:     moves."#s2" (%2)+,%3\n" \
+                "       move."#s2" %3,(%1)+\n" \
+                "       .ifnc \""#s3"\",\"\"\n" \
+                "3:     moves."#s3" (%2)+,%3\n" \
+                "       move."#s3" %3,(%1)+\n" \
+                "       .endif\n" \
+                "4:\n" \
+                "       .section __ex_table,\"a\"\n" \
+                "       .align 4\n" \
+                "       .long 1b,10f\n" \
+                "       .long 2b,20f\n" \
+                "       .ifnc \""#s3"\",\"\"\n" \
+                "       .long 3b,30f\n" \
+                "       .endif\n" \
+                "       .previous\n" \
+                "\n" \
+                "       .section .fixup,\"ax\"\n" \
+                "       .even\n" \
+                "10:    clr."#s1" (%1)+\n" \
+                "20:    clr."#s2" (%1)+\n" \
+                "       .ifnc \""#s3"\",\"\"\n" \
+                "30:    clr."#s3" (%1)+\n" \
+                "       .endif\n" \
+                "       moveq.l #"#n",%0\n" \
+                "       jra 4b\n" \
+                "       .previous\n" \
+                : "+d" (res), "+&a" (to), "+a" (from), "=&d" (tmp) \
+                : : "memory")
+
 static __always_inline unsigned long
 __constant_copy_from_user(void *to, const void __user *from, unsigned long n)
 {
         unsigned long res = 0, tmp;
 
-        /* limit the inlined version to 3 moves */
-        if (n == 11 || n > 12)
-                return __generic_copy_from_user(to, from, n);
-
         switch (n) {
         case 1:
                 __get_user_asm(res, *(u8 *)to, (u8 *)from, u8, b, d, 1);
-                return res;
+                break;
         case 2:
                 __get_user_asm(res, *(u16 *)to, (u16 *)from, u16, w, d, 2);
-                return res;
+                break;
+        case 3:
+                __constant_copy_from_user_asm(res, to, from, tmp, 3, w, b,);
+                break;
         case 4:
                 __get_user_asm(res, *(u32 *)to, (u32 *)from, u32, l, r, 4);
-                return res;
+                break;
+        case 5:
+                __constant_copy_from_user_asm(res, to, from, tmp, 5, l, b,);
+                break;
+        case 6:
+                __constant_copy_from_user_asm(res, to, from, tmp, 6, l, w,);
+                break;
+        case 7:
+                __constant_copy_from_user_asm(res, to, from, tmp, 7, l, w, b);
+                break;
+        case 8:
+                __constant_copy_from_user_asm(res, to, from, tmp, 8, l, l,);
+                break;
+        case 9:
+                __constant_copy_from_user_asm(res, to, from, tmp, 9, l, l, b);
+                break;
+        case 10:
+                __constant_copy_from_user_asm(res, to, from, tmp, 10, l, l, w);
+                break;
+        case 12:
+                __constant_copy_from_user_asm(res, to, from, tmp, 12, l, l, l);
+                break;
+        default:
+                /* we limit the inlined version to 3 moves */
+                return __generic_copy_from_user(to, from, n);
         }
 
-        asm volatile ("\n"
-                "       .ifndef .Lfrom_user\n"
-                "       .set .Lfrom_user,1\n"
-                "       .macro copy_from_user to,from,tmp\n"
-                "       .if .Lcnt >= 4\n"
-                "1:     moves.l (\\from)+,\\tmp\n"
-                "       move.l \\tmp,(\\to)+\n"
-                "       .set .Lcnt,.Lcnt-4\n"
-                "       .elseif .Lcnt & 2\n"
-                "1:     moves.w (\\from)+,\\tmp\n"
-                "       move.w \\tmp,(\\to)+\n"
-                "       .set .Lcnt,.Lcnt-2\n"
-                "       .elseif .Lcnt & 1\n"
-                "1:     moves.b (\\from)+,\\tmp\n"
-                "       move.b \\tmp,(\\to)+\n"
-                "       .set .Lcnt,.Lcnt-1\n"
-                "       .else\n"
-                "       .exitm\n"
-                "       .endif\n"
-                "\n"
-                "       .section __ex_table,\"a\"\n"
-                "       .align 4\n"
-                "       .long 1b,3f\n"
-                "       .previous\n"
-                "       .endm\n"
-                "       .endif\n"
-                "\n"
-                "       .set .Lcnt,%c4\n"
-                "       copy_from_user %1,%2,%3\n"
-                "       copy_from_user %1,%2,%3\n"
-                "       copy_from_user %1,%2,%3\n"
-                "2:\n"
-                "       .section .fixup,\"ax\"\n"
-                "       .even\n"
-                "3:     moveq.l %4,%0\n"
-                "       move.l %5,%1\n"
-                "       .rept %c4 / 4\n"
-                "       clr.l (%1)+\n"
-                "       .endr\n"
-                "       .if %c4 & 2\n"
-                "       clr.w (%1)+\n"
-                "       .endif\n"
-                "       .if %c4 & 1\n"
-                "       clr.b (%1)+\n"
-                "       .endif\n"
-                "       jra 2b\n"
-                "       .previous\n"
-                : "+r" (res), "+a" (to), "+a" (from), "=&d" (tmp)
-                : "i" (n), "g" (to)
-                : "memory");
-
         return res;
 }
 
+#define __constant_copy_to_user_asm(res, to, from, tmp, n, s1, s2, s3) \
+        asm volatile ("\n" \
+                "       move."#s1" (%2)+,%3\n" \
+                "11:    moves."#s1" %3,(%1)+\n" \
+                "12:    move."#s2" (%2)+,%3\n" \
+                "21:    moves."#s2" %3,(%1)+\n" \
+                "22:\n" \
+                "       .ifnc \""#s3"\",\"\"\n" \
+                "       move."#s3" (%2)+,%3\n" \
+                "31:    moves."#s3" %3,(%1)+\n" \
+                "32:\n" \
+                "       .endif\n" \
+                "4:\n" \
+                "\n" \
+                "       .section __ex_table,\"a\"\n" \
+                "       .align 4\n" \
+                "       .long 11b,5f\n" \
+                "       .long 12b,5f\n" \
+                "       .long 21b,5f\n" \
+                "       .long 22b,5f\n" \
+                "       .ifnc \""#s3"\",\"\"\n" \
+                "       .long 31b,5f\n" \
+                "       .long 32b,5f\n" \
+                "       .endif\n" \
+                "       .previous\n" \
+                "\n" \
+                "       .section .fixup,\"ax\"\n" \
+                "       .even\n" \
+                "5:     moveq.l #"#n",%0\n" \
+                "       jra 4b\n" \
+                "       .previous\n" \
+                : "+d" (res), "+a" (to), "+a" (from), "=&d" (tmp) \
+                : : "memory")
+
 static __always_inline unsigned long
 __constant_copy_to_user(void __user *to, const void *from, unsigned long n)
 {
         unsigned long res = 0, tmp;
 
-        /* limit the inlined version to 3 moves */
-        if (n == 11 || n > 12)
-                return __generic_copy_to_user(to, from, n);
-
         switch (n) {
         case 1:
                 __put_user_asm(res, *(u8 *)from, (u8 *)to, b, d, 1);
-                return res;
+                break;
         case 2:
                 __put_user_asm(res, *(u16 *)from, (u16 *)to, w, d, 2);
-                return res;
+                break;
+        case 3:
+                __constant_copy_to_user_asm(res, to, from, tmp, 3, w, b,);
+                break;
         case 4:
                 __put_user_asm(res, *(u32 *)from, (u32 *)to, l, r, 4);
-                return res;
+                break;
+        case 5:
+                __constant_copy_to_user_asm(res, to, from, tmp, 5, l, b,);
+                break;
+        case 6:
+                __constant_copy_to_user_asm(res, to, from, tmp, 6, l, w,);
+                break;
+        case 7:
+                __constant_copy_to_user_asm(res, to, from, tmp, 7, l, w, b);
+                break;
+        case 8:
+                __constant_copy_to_user_asm(res, to, from, tmp, 8, l, l,);
+                break;
+        case 9:
+                __constant_copy_to_user_asm(res, to, from, tmp, 9, l, l, b);
+                break;
+        case 10:
+                __constant_copy_to_user_asm(res, to, from, tmp, 10, l, l, w);
+                break;
+        case 12:
+                __constant_copy_to_user_asm(res, to, from, tmp, 12, l, l, l);
+                break;
+        default:
+                /* limit the inlined version to 3 moves */
+                return __generic_copy_to_user(to, from, n);
         }
 
-        asm volatile ("\n"
-                "       .ifndef .Lto_user\n"
-                "       .set .Lto_user,1\n"
-                "       .macro copy_to_user to,from,tmp\n"
-                "       .if .Lcnt >= 4\n"
-                "       move.l (\\from)+,\\tmp\n"
-                "11:    moves.l \\tmp,(\\to)+\n"
-                "12:    .set .Lcnt,.Lcnt-4\n"
-                "       .elseif .Lcnt & 2\n"
-                "       move.w (\\from)+,\\tmp\n"
-                "11:    moves.w \\tmp,(\\to)+\n"
-                "12:    .set .Lcnt,.Lcnt-2\n"
-                "       .elseif .Lcnt & 1\n"
-                "       move.b (\\from)+,\\tmp\n"
-                "11:    moves.b \\tmp,(\\to)+\n"
-                "12:    .set .Lcnt,.Lcnt-1\n"
-                "       .else\n"
-                "       .exitm\n"
-                "       .endif\n"
-                "\n"
-                "       .section __ex_table,\"a\"\n"
-                "       .align 4\n"
-                "       .long 11b,3f\n"
-                "       .long 12b,3f\n"
-                "       .previous\n"
-                "       .endm\n"
-                "       .endif\n"
-                "\n"
-                "       .set .Lcnt,%c4\n"
-                "       copy_to_user %1,%2,%3\n"
-                "       copy_to_user %1,%2,%3\n"
-                "       copy_to_user %1,%2,%3\n"
-                "2:\n"
-                "       .section .fixup,\"ax\"\n"
-                "       .even\n"
-                "3:     moveq.l %4,%0\n"
-                "       jra 2b\n"
-                "       .previous\n"
-                : "+r" (res), "+a" (to), "+a" (from), "=&d" (tmp)
-                : "i" (n)
-                : "memory");
-
         return res;
 }
 
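For illustration, a hypothetical caller (function and variable names invented, and assuming the usual __builtin_constant_p() dispatch elsewhere in this header) showing what the patched switch resolves to for a small constant size:

/*
 * Hypothetical usage sketch, not part of the patch: with a constant
 * n == 7 the patched __constant_copy_from_user() should take the
 * "case 7:" arm above, i.e. __constant_copy_from_user_asm(..., 7, l, w, b):
 * one longword, one word and one byte move, each guarded by an
 * __ex_table entry so a fault lands in the .fixup code.
 */
#include <linux/errno.h>
#include <asm/uaccess.h>

static long read_seven_bytes(void *kbuf, const void __user *uptr)
{
        if (copy_from_user(kbuf, uptr, 7))      /* constant size */
                return -EFAULT;
        return 0;
}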