author	Helge Deller <deller@gmx.de>	2007-01-28 08:52:57 -0500
committer	Kyle McMartin <kyle@athena.road.mcmartin.ca>	2007-02-17 01:16:26 -0500
commit	0b3d643f9ead9b5141dedbb2d1b06ce15469fc4a (patch)
tree	3be51559fb366dea87dc0eacfea2f94c15190875 /arch
parent	8e9e9844b44dd9f855d824d035b3097b199e44ed (diff)
[PARISC] add ASM_EXCEPTIONTABLE_ENTRY() macro
- this macro unifies the code to add exception table entries
- additionally use ENTRY()/ENDPROC() at more places

Signed-off-by: Helge Deller <deller@gmx.de>
Signed-off-by: Kyle McMartin <kyle@parisc-linux.org>
Diffstat (limited to 'arch')
-rw-r--r--	arch/parisc/kernel/pacache.S	9
-rw-r--r--	arch/parisc/kernel/syscall.S	26
-rw-r--r--	arch/parisc/kernel/unaligned.c	112
-rw-r--r--	arch/parisc/lib/fixup.S	20
-rw-r--r--	arch/parisc/lib/lusercopy.S	37
-rw-r--r--	arch/parisc/lib/memcpy.c	38
6 files changed, 62 insertions, 180 deletions
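
The ASM_EXCEPTIONTABLE_ENTRY() macro added by this commit is not itself visible in the diff below (only its call sites are). Judging from the open-coded sequences it replaces, it presumably lives in the parisc uaccess/assembly headers and expands roughly as follows; this is a sketch of the assumed shape, not the header's verbatim text:

	/* Sketch only: assumed definitions in the parisc headers.
	 * ASM_ULONG_INSN picks a pointer-sized data directive, and
	 * ASM_EXCEPTIONTABLE_ENTRY emits one (fault address, fixup address)
	 * pair into the __ex_table section from C inline assembly. */
	#ifdef CONFIG_64BIT
	#define ASM_ULONG_INSN	".dword"
	#else
	#define ASM_ULONG_INSN	".word"
	#endif

	#define ASM_EXCEPTIONTABLE_ENTRY(fault_addr, except_addr)		\
		"\t.section __ex_table,\"aw\"\n"				\
		"\t" ASM_ULONG_INSN "\t" #fault_addr ", " #except_addr "\n"	\
		"\t.previous\n"

Because the macro expands to string literals, a call site simply drops it between the strings of an asm() statement, which is why the hunks below can delete the per-site #ifdef __LP64__ blocks.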
diff --git a/arch/parisc/kernel/pacache.S b/arch/parisc/kernel/pacache.S
index 75d522e2d058..90b240878520 100644
--- a/arch/parisc/kernel/pacache.S
+++ b/arch/parisc/kernel/pacache.S
@@ -27,20 +27,11 @@
  */
 
 #ifdef CONFIG_64BIT
-#define ADDIB	addib,*
-#define CMPB	cmpb,*
-#define ANDCM	andcm,*
-
 	.level	2.0w
 #else
-#define ADDIB	addib,
-#define CMPB	cmpb,
-#define ANDCM	andcm
-
 	.level	2.0
 #endif
 
-
 #include <asm/psw.h>
 #include <asm/assembly.h>
 #include <asm/pgtable.h>
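
Note that pacache.S can drop its private ADDIB/CMPB/ANDCM definitions only because shared equivalents exist in a header it already includes; that header change is not part of this diff. The shared copy is assumed to sit in asm/assembly.h and to read the same as the lines removed above:

	/* Assumed shared copy in asm/assembly.h -- the content is taken from
	 * the removed lines; only the location is an assumption. */
	#ifdef CONFIG_64BIT
	#define ADDIB	addib,*
	#define CMPB	cmpb,*
	#define ANDCM	andcm,*
	#else
	#define ADDIB	addib,
	#define CMPB	cmpb,
	#define ANDCM	andcm
	#endif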
diff --git a/arch/parisc/kernel/syscall.S b/arch/parisc/kernel/syscall.S
index de1812de5183..10859f53e94f 100644
--- a/arch/parisc/kernel/syscall.S
+++ b/arch/parisc/kernel/syscall.S
@@ -23,19 +23,7 @@
  */
 #define KILL_INSN	break	0,0
 
-#ifdef CONFIG_64BIT
-	.level	2.0w
-#else
-	.level	1.1
-#endif
-
-/* on 64bit pad to 64bit values */
-#ifdef CONFIG_64BIT
-#define ULONG_WORD(x) .word 0, x
-#else
-#define ULONG_WORD(x) .word x
-#endif
-
+	.level	LEVEL
 
 	.text
 
@@ -603,16 +591,10 @@ cas_action:
 	   the other for the store. Either return -EFAULT.
 	   Each of the entries must be relocated. */
 	.section __ex_table,"aw"
-	ULONG_WORD(2b - linux_gateway_page)
-	ULONG_WORD(3b - linux_gateway_page)
-	.previous
-
-	.section __ex_table,"aw"
-	ULONG_WORD(1b - linux_gateway_page)
-	ULONG_WORD(3b - linux_gateway_page)
+	ASM_ULONG_INSN (1b - linux_gateway_page), (3b - linux_gateway_page)
+	ASM_ULONG_INSN (2b - linux_gateway_page), (3b - linux_gateway_page)
 	.previous
 
-end_compare_and_swap:
 
 	/* Make sure nothing else is placed on this page */
 	.align ASM_PAGE_SIZE
@@ -622,7 +604,7 @@ ENTRY(end_linux_gateway_page)
 	/* Relocate symbols assuming linux_gateway_page is mapped
 	   to virtual address 0x0 */
 
-#define LWS_ENTRY(_name_) ULONG_WORD(lws_##_name_ - linux_gateway_page)
+#define LWS_ENTRY(_name_) ASM_ULONG_INSN (lws_##_name_ - linux_gateway_page)
 
 	.section .rodata,"a"
 
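
syscall.S now relies on two assembler-side helpers, LEVEL and ASM_ULONG_INSN, that this diff does not show. Based on the #ifdef blocks they replace, their assembler-visible definitions (assumed to be in asm/assembly.h, in its assembly branch) would look roughly like:

	/* Assumed assembler-side helpers, not part of this diff: LEVEL selects
	 * the PA architecture level for .level, and ASM_ULONG_INSN emits a
	 * pointer-sized word in place of the removed ULONG_WORD() wrapper. */
	#ifdef CONFIG_64BIT
	#define LEVEL		2.0w
	#define ASM_ULONG_INSN	.dword
	#else
	#define LEVEL		1.1
	#define ASM_ULONG_INSN	.word
	#endif

The old 64-bit ULONG_WORD(x) emitted ".word 0, x"; on big-endian PA-RISC a single ".dword x" lays down the same 64-bit value for these 32-bit page offsets, so the table format is unchanged.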
diff --git a/arch/parisc/kernel/unaligned.c b/arch/parisc/kernel/unaligned.c
index 58ca443100a2..347bb922e6d0 100644
--- a/arch/parisc/kernel/unaligned.c
+++ b/arch/parisc/kernel/unaligned.c
@@ -35,7 +35,7 @@
 #define DPRINTF(fmt, args...)
 #endif
 
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 #define RFMT "%016lx"
 #else
 #define RFMT "%08lx"
@@ -150,15 +150,8 @@ static int emulate_ldh(struct pt_regs *regs, int toreg)
150"4: ldi -2, %1\n" 150"4: ldi -2, %1\n"
151 FIXUP_BRANCH(3b) 151 FIXUP_BRANCH(3b)
152" .previous\n" 152" .previous\n"
153" .section __ex_table,\"aw\"\n" 153 ASM_EXCEPTIONTABLE_ENTRY(1b, 4b)
154#ifdef __LP64__ 154 ASM_EXCEPTIONTABLE_ENTRY(2b, 4b)
155" .dword 1b,4b\n"
156" .dword 2b,4b\n"
157#else
158" .word 1b,4b\n"
159" .word 2b,4b\n"
160#endif
161" .previous\n"
162 : "=r" (val), "=r" (ret) 155 : "=r" (val), "=r" (ret)
163 : "0" (val), "r" (saddr), "r" (regs->isr) 156 : "0" (val), "r" (saddr), "r" (regs->isr)
164 : "r20", FIXUP_BRANCH_CLOBBER ); 157 : "r20", FIXUP_BRANCH_CLOBBER );
@@ -195,15 +188,8 @@ static int emulate_ldw(struct pt_regs *regs, int toreg, int flop)
195"4: ldi -2, %1\n" 188"4: ldi -2, %1\n"
196 FIXUP_BRANCH(3b) 189 FIXUP_BRANCH(3b)
197" .previous\n" 190" .previous\n"
198" .section __ex_table,\"aw\"\n" 191 ASM_EXCEPTIONTABLE_ENTRY(1b, 4b)
199#ifdef __LP64__ 192 ASM_EXCEPTIONTABLE_ENTRY(2b, 4b)
200" .dword 1b,4b\n"
201" .dword 2b,4b\n"
202#else
203" .word 1b,4b\n"
204" .word 2b,4b\n"
205#endif
206" .previous\n"
207 : "=r" (val), "=r" (ret) 193 : "=r" (val), "=r" (ret)
208 : "0" (val), "r" (saddr), "r" (regs->isr) 194 : "0" (val), "r" (saddr), "r" (regs->isr)
209 : "r19", "r20", FIXUP_BRANCH_CLOBBER ); 195 : "r19", "r20", FIXUP_BRANCH_CLOBBER );
@@ -227,7 +213,7 @@ static int emulate_ldd(struct pt_regs *regs, int toreg, int flop)
 			regs->isr, regs->ior, toreg);
 #ifdef CONFIG_PA20
 
-#ifndef __LP64__
+#ifndef CONFIG_64BIT
 	if (!flop)
 		return -1;
 #endif
@@ -246,15 +232,8 @@ static int emulate_ldd(struct pt_regs *regs, int toreg, int flop)
246"4: ldi -2, %1\n" 232"4: ldi -2, %1\n"
247 FIXUP_BRANCH(3b) 233 FIXUP_BRANCH(3b)
248" .previous\n" 234" .previous\n"
249" .section __ex_table,\"aw\"\n" 235 ASM_EXCEPTIONTABLE_ENTRY(1b,4b)
250#ifdef __LP64__ 236 ASM_EXCEPTIONTABLE_ENTRY(2b,4b)
251" .dword 1b,4b\n"
252" .dword 2b,4b\n"
253#else
254" .word 1b,4b\n"
255" .word 2b,4b\n"
256#endif
257" .previous\n"
258 : "=r" (val), "=r" (ret) 237 : "=r" (val), "=r" (ret)
259 : "0" (val), "r" (saddr), "r" (regs->isr) 238 : "0" (val), "r" (saddr), "r" (regs->isr)
260 : "r19", "r20", FIXUP_BRANCH_CLOBBER ); 239 : "r19", "r20", FIXUP_BRANCH_CLOBBER );
@@ -278,17 +257,9 @@ static int emulate_ldd(struct pt_regs *regs, int toreg, int flop)
278"5: ldi -2, %2\n" 257"5: ldi -2, %2\n"
279 FIXUP_BRANCH(4b) 258 FIXUP_BRANCH(4b)
280" .previous\n" 259" .previous\n"
281" .section __ex_table,\"aw\"\n" 260 ASM_EXCEPTIONTABLE_ENTRY(1b,5b)
282#ifdef __LP64__ 261 ASM_EXCEPTIONTABLE_ENTRY(2b,5b)
283" .dword 1b,5b\n" 262 ASM_EXCEPTIONTABLE_ENTRY(3b,5b)
284" .dword 2b,5b\n"
285" .dword 3b,5b\n"
286#else
287" .word 1b,5b\n"
288" .word 2b,5b\n"
289" .word 3b,5b\n"
290#endif
291" .previous\n"
292 : "=r" (valh), "=r" (vall), "=r" (ret) 263 : "=r" (valh), "=r" (vall), "=r" (ret)
293 : "0" (valh), "1" (vall), "r" (saddr), "r" (regs->isr) 264 : "0" (valh), "1" (vall), "r" (saddr), "r" (regs->isr)
294 : "r19", "r20", FIXUP_BRANCH_CLOBBER ); 265 : "r19", "r20", FIXUP_BRANCH_CLOBBER );
@@ -328,15 +299,8 @@ static int emulate_sth(struct pt_regs *regs, int frreg)
328"4: ldi -2, %0\n" 299"4: ldi -2, %0\n"
329 FIXUP_BRANCH(3b) 300 FIXUP_BRANCH(3b)
330" .previous\n" 301" .previous\n"
331" .section __ex_table,\"aw\"\n" 302 ASM_EXCEPTIONTABLE_ENTRY(1b,4b)
332#ifdef __LP64__ 303 ASM_EXCEPTIONTABLE_ENTRY(2b,4b)
333" .dword 1b,4b\n"
334" .dword 2b,4b\n"
335#else
336" .word 1b,4b\n"
337" .word 2b,4b\n"
338#endif
339" .previous\n"
340 : "=r" (ret) 304 : "=r" (ret)
341 : "r" (val), "r" (regs->ior), "r" (regs->isr) 305 : "r" (val), "r" (regs->ior), "r" (regs->isr)
342 : "r19", FIXUP_BRANCH_CLOBBER ); 306 : "r19", FIXUP_BRANCH_CLOBBER );
@@ -382,15 +346,8 @@ static int emulate_stw(struct pt_regs *regs, int frreg, int flop)
382"4: ldi -2, %0\n" 346"4: ldi -2, %0\n"
383 FIXUP_BRANCH(3b) 347 FIXUP_BRANCH(3b)
384" .previous\n" 348" .previous\n"
385" .section __ex_table,\"aw\"\n" 349 ASM_EXCEPTIONTABLE_ENTRY(1b,4b)
386#ifdef __LP64__ 350 ASM_EXCEPTIONTABLE_ENTRY(2b,4b)
387" .dword 1b,4b\n"
388" .dword 2b,4b\n"
389#else
390" .word 1b,4b\n"
391" .word 2b,4b\n"
392#endif
393" .previous\n"
394 : "=r" (ret) 351 : "=r" (ret)
395 : "r" (val), "r" (regs->ior), "r" (regs->isr) 352 : "r" (val), "r" (regs->ior), "r" (regs->isr)
396 : "r19", "r20", "r21", "r22", "r1", FIXUP_BRANCH_CLOBBER ); 353 : "r19", "r20", "r21", "r22", "r1", FIXUP_BRANCH_CLOBBER );
@@ -413,7 +370,7 @@ static int emulate_std(struct pt_regs *regs, int frreg, int flop)
 			val,  regs->isr, regs->ior);
 
 #ifdef CONFIG_PA20
-#ifndef __LP64__
+#ifndef CONFIG_64BIT
 	if (!flop)
 		return -1;
 #endif
@@ -439,19 +396,10 @@ static int emulate_std(struct pt_regs *regs, int frreg, int flop)
439"6: ldi -2, %0\n" 396"6: ldi -2, %0\n"
440 FIXUP_BRANCH(5b) 397 FIXUP_BRANCH(5b)
441" .previous\n" 398" .previous\n"
442" .section __ex_table,\"aw\"\n" 399 ASM_EXCEPTIONTABLE_ENTRY(1b,6b)
443#ifdef __LP64__ 400 ASM_EXCEPTIONTABLE_ENTRY(2b,6b)
444" .dword 1b,6b\n" 401 ASM_EXCEPTIONTABLE_ENTRY(3b,6b)
445" .dword 2b,6b\n" 402 ASM_EXCEPTIONTABLE_ENTRY(4b,6b)
446" .dword 3b,6b\n"
447" .dword 4b,6b\n"
448#else
449" .word 1b,6b\n"
450" .word 2b,6b\n"
451" .word 3b,6b\n"
452" .word 4b,6b\n"
453#endif
454" .previous\n"
455 : "=r" (ret) 403 : "=r" (ret)
456 : "r" (val), "r" (regs->ior), "r" (regs->isr) 404 : "r" (val), "r" (regs->ior), "r" (regs->isr)
457 : "r19", "r20", "r21", "r22", "r1", FIXUP_BRANCH_CLOBBER ); 405 : "r19", "r20", "r21", "r22", "r1", FIXUP_BRANCH_CLOBBER );
@@ -482,21 +430,11 @@ static int emulate_std(struct pt_regs *regs, int frreg, int flop)
482"7: ldi -2, %0\n" 430"7: ldi -2, %0\n"
483 FIXUP_BRANCH(6b) 431 FIXUP_BRANCH(6b)
484" .previous\n" 432" .previous\n"
485" .section __ex_table,\"aw\"\n" 433 ASM_EXCEPTIONTABLE_ENTRY(1b,7b)
486#ifdef __LP64__ 434 ASM_EXCEPTIONTABLE_ENTRY(2b,7b)
487" .dword 1b,7b\n" 435 ASM_EXCEPTIONTABLE_ENTRY(3b,7b)
488" .dword 2b,7b\n" 436 ASM_EXCEPTIONTABLE_ENTRY(4b,7b)
489" .dword 3b,7b\n" 437 ASM_EXCEPTIONTABLE_ENTRY(5b,7b)
490" .dword 4b,7b\n"
491" .dword 5b,7b\n"
492#else
493" .word 1b,7b\n"
494" .word 2b,7b\n"
495" .word 3b,7b\n"
496" .word 4b,7b\n"
497" .word 5b,7b\n"
498#endif
499" .previous\n"
500 : "=r" (ret) 438 : "=r" (ret)
501 : "r" (valh), "r" (vall), "r" (regs->ior), "r" (regs->isr) 439 : "r" (valh), "r" (vall), "r" (regs->ior), "r" (regs->isr)
502 : "r19", "r20", "r21", "r1", FIXUP_BRANCH_CLOBBER ); 440 : "r19", "r20", "r21", "r1", FIXUP_BRANCH_CLOBBER );
diff --git a/arch/parisc/lib/fixup.S b/arch/parisc/lib/fixup.S
index ecce3d35401f..d172d4245cdc 100644
--- a/arch/parisc/lib/fixup.S
+++ b/arch/parisc/lib/fixup.S
@@ -22,6 +22,7 @@
 #include <asm/asm-offsets.h>
 #include <asm/assembly.h>
 #include <asm/errno.h>
+#include <linux/linkage.h>
 
 #ifdef CONFIG_SMP
 	.macro  get_fault_ip t1 t2
@@ -30,7 +31,7 @@
 	/* t2 = smp_processor_id() */
 	mfctl 30,\t2
 	ldw TI_CPU(\t2),\t2
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	extrd,u \t2,63,32,\t2
 #endif
 	/* t2 = &__per_cpu_offset[smp_processor_id()]; */
@@ -58,33 +59,34 @@
 	.section .fixup, "ax"
 
 	/* get_user() fixups, store -EFAULT in r8, and 0 in r9 */
-	.export fixup_get_user_skip_1
-fixup_get_user_skip_1:
+ENTRY(fixup_get_user_skip_1)
 	get_fault_ip %r1,%r8
 	ldo 4(%r1), %r1
 	ldi -EFAULT, %r8
 	bv %r0(%r1)
 	copy %r0, %r9
+ENDPROC(fixup_get_user_skip_1)
 
-	.export fixup_get_user_skip_2
-fixup_get_user_skip_2:
+ENTRY(fixup_get_user_skip_2)
 	get_fault_ip %r1,%r8
 	ldo 8(%r1), %r1
 	ldi -EFAULT, %r8
 	bv %r0(%r1)
 	copy %r0, %r9
+ENDPROC(fixup_get_user_skip_2)
 
 	/* put_user() fixups, store -EFAULT in r8 */
-	.export fixup_put_user_skip_1
-fixup_put_user_skip_1:
+ENTRY(fixup_put_user_skip_1)
 	get_fault_ip %r1,%r8
 	ldo 4(%r1), %r1
 	bv %r0(%r1)
 	ldi -EFAULT, %r8
+ENDPROC(fixup_put_user_skip_1)
 
-	.export fixup_put_user_skip_2
-fixup_put_user_skip_2:
+ENTRY(fixup_put_user_skip_2)
 	get_fault_ip %r1,%r8
 	ldo 8(%r1), %r1
 	bv %r0(%r1)
 	ldi -EFAULT, %r8
+ENDPROC(fixup_put_user_skip_2)
+
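
The .export/label pairs in fixup.S are replaced by ENTRY()/ENDPROC() from the newly included <linux/linkage.h>. For reference, the generic fallback definitions in that header are roughly as follows (an approximation; the exact text, and any parisc-specific overrides, are not part of this diff):

	/* Approximate generic <linux/linkage.h> helpers, shown only to explain
	 * what the .S files gain from the conversion. */
	#ifndef ENTRY
	#define ENTRY(name)			\
		.globl name;			\
		ALIGN;				\
		name:
	#endif

	#ifndef END
	#define END(name)			\
		.size name, .-name
	#endif

	#ifndef ENDPROC
	#define ENDPROC(name)			\
		.type name, @function;		\
		END(name)
	#endif

Beyond making the symbol global, ENDPROC() marks it as a function and records its size, which the old bare "name:" labels did not.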
diff --git a/arch/parisc/lib/lusercopy.S b/arch/parisc/lib/lusercopy.S
index a0509855c9a7..1bd23ccec17b 100644
--- a/arch/parisc/lib/lusercopy.S
+++ b/arch/parisc/lib/lusercopy.S
@@ -37,6 +37,7 @@
 
 #include <asm/assembly.h>
 #include <asm/errno.h>
+#include <linux/linkage.h>
 
 	/*
 	 * get_sr gets the appropriate space value into
@@ -67,8 +68,7 @@
 	 * otherwise strlen (i.e. excludes zero byte)
 	 */
 
-	.export lstrncpy_from_user,code
-lstrncpy_from_user:
+ENTRY(lstrncpy_from_user)
 	.proc
 	.callinfo NO_CALLS
 	.entry
@@ -87,6 +87,7 @@ $lsfu_exit:
 	bv %r0(%r2)
 	nop
 	.exit
+ENDPROC(lstrncpy_from_user)
 
 	.section .fixup,"ax"
 3:      fixup_branch $lsfu_exit
@@ -94,13 +95,8 @@ $lsfu_exit:
 	.previous
 
 	.section __ex_table,"aw"
-#ifdef __LP64__
-	.dword	1b,3b
-	.dword	2b,3b
-#else
-	.word	1b,3b
-	.word	2b,3b
-#endif
+	ASM_ULONG_INSN 1b,3b
+	ASM_ULONG_INSN 2b,3b
 	.previous
 
 	.procend
@@ -112,8 +108,7 @@ $lsfu_exit:
 	 * otherwise, returns number of bytes not transferred.
 	 */
 
-	.export lclear_user,code
-lclear_user:
+ENTRY(lclear_user)
 	.proc
 	.callinfo NO_CALLS
 	.entry
@@ -127,6 +122,7 @@ $lclu_done:
 	bv %r0(%r2)
 	copy %r25,%r28
 	.exit
+ENDPROC(lclear_user)
 
 	.section .fixup,"ax"
 2:      fixup_branch $lclu_done
@@ -134,11 +130,7 @@ $lclu_done:
 	.previous
 
 	.section __ex_table,"aw"
-#ifdef __LP64__
-	.dword	1b,2b
-#else
-	.word	1b,2b
-#endif
+	ASM_ULONG_INSN 1b,2b
 	.previous
 
 	.procend
@@ -151,8 +143,7 @@ $lclu_done:
 	 * else strlen + 1 (i.e. includes zero byte).
 	 */
 
-	.export lstrnlen_user,code
-lstrnlen_user:
+ENTRY(lstrnlen_user)
 	.proc
 	.callinfo NO_CALLS
 	.entry
@@ -172,6 +163,7 @@ $lslen_done:
 $lslen_nzero:
 	b $lslen_done
 	ldo 1(%r26),%r26	/* special case for N == 0 */
+ENDPROC(lstrnlen_user)
 
 	.section .fixup,"ax"
 3:      fixup_branch $lslen_done
@@ -179,13 +171,8 @@ $lslen_nzero:
 	.previous
 
 	.section __ex_table,"aw"
-#ifdef __LP64__
-	.dword	1b,3b
-	.dword	2b,3b
-#else
-	.word	1b,3b
-	.word	2b,3b
-#endif
+	ASM_ULONG_INSN 1b,3b
+	ASM_ULONG_INSN 2b,3b
 	.previous
 
 	.procend
diff --git a/arch/parisc/lib/memcpy.c b/arch/parisc/lib/memcpy.c
index 5575e41f9d60..2c43ebe99a9c 100644
--- a/arch/parisc/lib/memcpy.c
+++ b/arch/parisc/lib/memcpy.c
@@ -96,30 +96,18 @@ DECLARE_PER_CPU(struct exception_data, exception_data);
 #define DPRINTF(fmt, args...)
 #endif
 
-#ifndef __LP64__
-#define EXC_WORD ".word"
-#else
-#define EXC_WORD ".dword"
-#endif
-
 #define def_load_ai_insn(_insn,_sz,_tt,_s,_a,_t,_e)	\
 	__asm__ __volatile__ (				\
-	"1:\t" #_insn ",ma " #_sz "(" _s ",%1), %0\n"	\
-	"\t.section __ex_table,\"aw\"\n"		\
-	"\t" EXC_WORD "\t1b\n"				\
-	"\t" EXC_WORD "\t" #_e "\n"			\
-	"\t.previous\n"					\
+	"1:\t" #_insn ",ma " #_sz "(" _s ",%1), %0\n\t"	\
+	ASM_EXCEPTIONTABLE_ENTRY(1b,_e)			\
 	: _tt(_t), "+r"(_a)				\
 	:						\
 	: "r8")
 
 #define def_store_ai_insn(_insn,_sz,_tt,_s,_a,_t,_e)	\
 	__asm__ __volatile__ (				\
-	"1:\t" #_insn ",ma %1, " #_sz "(" _s ",%0)\n"	\
-	"\t.section __ex_table,\"aw\"\n"		\
-	"\t" EXC_WORD "\t1b\n"				\
-	"\t" EXC_WORD "\t" #_e "\n"			\
-	"\t.previous\n"					\
+	"1:\t" #_insn ",ma %1, " #_sz "(" _s ",%0)\n\t"	\
+	ASM_EXCEPTIONTABLE_ENTRY(1b,_e)			\
 	: "+r"(_a)					\
 	: _tt(_t)					\
 	: "r8")
@@ -133,22 +121,16 @@ DECLARE_PER_CPU(struct exception_data, exception_data);
 
 #define def_load_insn(_insn,_tt,_s,_o,_a,_t,_e)	\
 	__asm__ __volatile__ (				\
-	"1:\t" #_insn " " #_o "(" _s ",%1), %0\n"	\
-	"\t.section __ex_table,\"aw\"\n"		\
-	"\t" EXC_WORD "\t1b\n"				\
-	"\t" EXC_WORD "\t" #_e "\n"			\
-	"\t.previous\n"					\
+	"1:\t" #_insn " " #_o "(" _s ",%1), %0\n\t"	\
+	ASM_EXCEPTIONTABLE_ENTRY(1b,_e)			\
 	: _tt(_t)					\
 	: "r"(_a)					\
 	: "r8")
 
 #define def_store_insn(_insn,_tt,_s,_t,_o,_a,_e)	\
 	__asm__ __volatile__ (				\
-	"1:\t" #_insn " %0, " #_o "(" _s ",%1)\n"	\
-	"\t.section __ex_table,\"aw\"\n"		\
-	"\t" EXC_WORD "\t1b\n"				\
-	"\t" EXC_WORD "\t" #_e "\n"			\
-	"\t.previous\n"					\
+	"1:\t" #_insn " %0, " #_o "(" _s ",%1)\n\t"	\
+	ASM_EXCEPTIONTABLE_ENTRY(1b,_e)			\
 	:						\
 	: _tt(_t), "r"(_a)				\
 	: "r8")
@@ -167,8 +149,8 @@ extern inline void prefetch_dst(const void *addr)
 	__asm__("ldd 0(" d_space ",%0), %%r0" : : "r" (addr));
 }
 #else
-#define prefetch_src(addr)
-#define prefetch_dst(addr)
+#define prefetch_src(addr) do { } while(0)
+#define prefetch_dst(addr) do { } while(0)
 #endif
 
 /* Copy from a not-aligned src to an aligned dst, using shifts. Handles 4 words
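
The final hunk also turns the empty prefetch_src()/prefetch_dst() stubs into "do { } while(0)" statements. A common reason for this idiom (stated here as general C practice, not taken from the commit message) is that the macro then still behaves as a single well-formed statement when used as the body of an if, instead of expanding to a bare ";" that some compilers warn about:

	/* Illustrative only; the name reuses the one from memcpy.c above. */
	#define prefetch_src(addr) do { } while (0)

	static void copy_hint(const void *src, int want_prefetch)
	{
		if (want_prefetch)
			prefetch_src(src);	/* still a proper single statement */
	}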