Diffstat (limited to 'arch/parisc/lib')
-rw-r--r-- | arch/parisc/lib/bitops.c    |  4
-rw-r--r-- | arch/parisc/lib/fixup.S     | 20
-rw-r--r-- | arch/parisc/lib/lusercopy.S | 37
-rw-r--r-- | arch/parisc/lib/memcpy.c    | 38
4 files changed, 35 insertions, 64 deletions
diff --git a/arch/parisc/lib/bitops.c b/arch/parisc/lib/bitops.c
index f352666b5b2f..e3eb739fab19 100644
--- a/arch/parisc/lib/bitops.c
+++ b/arch/parisc/lib/bitops.c
@@ -17,7 +17,7 @@ raw_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned = {
 };
 #endif
 
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 unsigned long __xchg64(unsigned long x, unsigned long *ptr)
 {
 	unsigned long temp, flags;
@@ -56,7 +56,7 @@ unsigned long __xchg8(char x, char *ptr)
 }
 
 
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 unsigned long __cmpxchg_u64(volatile unsigned long *ptr, unsigned long old, unsigned long new)
 {
 	unsigned long flags;
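(For context, not part of the patch: __LP64__ is predefined by the compiler whenever it targets an LP64 ABI, while CONFIG_64BIT comes from Kconfig, so it reflects the kernel configuration and works identically in C and preprocessed assembly. A minimal sketch of the preferred guard, with pointer_bits() being a made-up example function:)

/* Hedged sketch, not from the patch: CONFIG_64BIT is defined (or not) by the
 * kernel configuration; __LP64__ is a compiler-predefined ABI macro.
 */
static inline int pointer_bits(void)
{
#ifdef CONFIG_64BIT
	return 64;		/* 64-bit kernel configuration */
#else
	return 32;		/* 32-bit kernel configuration */
#endif
}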
diff --git a/arch/parisc/lib/fixup.S b/arch/parisc/lib/fixup.S
index ecce3d35401f..d172d4245cdc 100644
--- a/arch/parisc/lib/fixup.S
+++ b/arch/parisc/lib/fixup.S
@@ -22,6 +22,7 @@
 #include <asm/asm-offsets.h>
 #include <asm/assembly.h>
 #include <asm/errno.h>
+#include <linux/linkage.h>
 
 #ifdef CONFIG_SMP
 	.macro get_fault_ip t1 t2
@@ -30,7 +31,7 @@
 	/* t2 = smp_processor_id() */
 	mfctl 30,\t2
 	ldw TI_CPU(\t2),\t2
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	extrd,u \t2,63,32,\t2
 #endif
 	/* t2 = &__per_cpu_offset[smp_processor_id()]; */
@@ -58,33 +59,34 @@
 	.section .fixup, "ax"
 
 	/* get_user() fixups, store -EFAULT in r8, and 0 in r9 */
-	.export fixup_get_user_skip_1
-fixup_get_user_skip_1:
+ENTRY(fixup_get_user_skip_1)
 	get_fault_ip %r1,%r8
 	ldo 4(%r1), %r1
 	ldi -EFAULT, %r8
 	bv %r0(%r1)
 	copy %r0, %r9
+ENDPROC(fixup_get_user_skip_1)
 
-	.export fixup_get_user_skip_2
-fixup_get_user_skip_2:
+ENTRY(fixup_get_user_skip_2)
 	get_fault_ip %r1,%r8
 	ldo 8(%r1), %r1
 	ldi -EFAULT, %r8
 	bv %r0(%r1)
 	copy %r0, %r9
+ENDPROC(fixup_get_user_skip_2)
 
 	/* put_user() fixups, store -EFAULT in r8 */
-	.export fixup_put_user_skip_1
-fixup_put_user_skip_1:
+ENTRY(fixup_put_user_skip_1)
 	get_fault_ip %r1,%r8
 	ldo 4(%r1), %r1
 	bv %r0(%r1)
 	ldi -EFAULT, %r8
+ENDPROC(fixup_put_user_skip_1)
 
-	.export fixup_put_user_skip_2
-fixup_put_user_skip_2:
+ENTRY(fixup_put_user_skip_2)
 	get_fault_ip %r1,%r8
 	ldo 8(%r1), %r1
 	bv %r0(%r1)
 	ldi -EFAULT, %r8
+ENDPROC(fixup_put_user_skip_2)
+
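(For context, not part of the patch: the conversion replaces the parisc-specific .export/label pairs with the common <linux/linkage.h> helpers, which also attach symbol type and size information. A hedged sketch of what these helpers roughly expand to for .S files; the real definitions vary per architecture in alignment and symbol-type details:)

/* Hedged sketch of the generic <linux/linkage.h> assembly helpers. */
#ifndef ENTRY
#define ENTRY(name)		\
	.globl name;		\
	.align 4;		\
	name:
#endif

#ifndef END
#define END(name)		\
	.size name, .-name
#endif

#ifndef ENDPROC
#define ENDPROC(name)		\
	.type name, @function;	\
	END(name)
#endif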
diff --git a/arch/parisc/lib/lusercopy.S b/arch/parisc/lib/lusercopy.S
index a0509855c9a7..1bd23ccec17b 100644
--- a/arch/parisc/lib/lusercopy.S
+++ b/arch/parisc/lib/lusercopy.S
@@ -37,6 +37,7 @@
 
 #include <asm/assembly.h>
 #include <asm/errno.h>
+#include <linux/linkage.h>
 
 /*
  * get_sr gets the appropriate space value into
@@ -67,8 +68,7 @@
  * otherwise strlen (i.e. excludes zero byte)
  */
 
-	.export lstrncpy_from_user,code
-lstrncpy_from_user:
+ENTRY(lstrncpy_from_user)
 	.proc
 	.callinfo NO_CALLS
 	.entry
@@ -87,6 +87,7 @@ $lsfu_exit:
 	bv %r0(%r2)
 	nop
 	.exit
+ENDPROC(lstrncpy_from_user)
 
 	.section .fixup,"ax"
 3:	fixup_branch $lsfu_exit
@@ -94,13 +95,8 @@ $lsfu_exit:
 	.previous
 
 	.section __ex_table,"aw"
-#ifdef __LP64__
-	.dword 1b,3b
-	.dword 2b,3b
-#else
-	.word 1b,3b
-	.word 2b,3b
-#endif
+	ASM_ULONG_INSN 1b,3b
+	ASM_ULONG_INSN 2b,3b
 	.previous
 
 	.procend
@@ -112,8 +108,7 @@ $lsfu_exit:
  * otherwise, returns number of bytes not transferred.
  */
 
-	.export lclear_user,code
-lclear_user:
+ENTRY(lclear_user)
 	.proc
 	.callinfo NO_CALLS
 	.entry
@@ -127,6 +122,7 @@ $lclu_done:
 	bv %r0(%r2)
 	copy %r25,%r28
 	.exit
+ENDPROC(lclear_user)
 
 	.section .fixup,"ax"
 2:	fixup_branch $lclu_done
@@ -134,11 +130,7 @@ $lclu_done:
 	.previous
 
 	.section __ex_table,"aw"
-#ifdef __LP64__
-	.dword 1b,2b
-#else
-	.word 1b,2b
-#endif
+	ASM_ULONG_INSN 1b,2b
 	.previous
 
 	.procend
@@ -151,8 +143,7 @@ $lclu_done:
  * else strlen + 1 (i.e. includes zero byte).
  */
 
-	.export lstrnlen_user,code
-lstrnlen_user:
+ENTRY(lstrnlen_user)
 	.proc
 	.callinfo NO_CALLS
 	.entry
@@ -172,6 +163,7 @@ $lslen_done:
 $lslen_nzero:
 	b $lslen_done
 	ldo 1(%r26),%r26	/* special case for N == 0 */
+ENDPROC(lstrnlen_user)
 
 	.section .fixup,"ax"
 3:	fixup_branch $lslen_done
@@ -179,13 +171,8 @@ $lslen_nzero:
 	.previous
 
 	.section __ex_table,"aw"
-#ifdef __LP64__
-	.dword 1b,3b
-	.dword 2b,3b
-#else
-	.word 1b,3b
-	.word 2b,3b
-#endif
+	ASM_ULONG_INSN 1b,3b
+	ASM_ULONG_INSN 2b,3b
 	.previous
 
 	.procend
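(For context, not part of the patch: ASM_ULONG_INSN is provided by the parisc <asm/assembly.h> header; a hedged sketch of the idea, so the per-site __LP64__ conditionals around the exception-table words can go away:)

/* Hedged sketch: emit a data word the size of an unsigned long, selected
 * once by the kernel configuration instead of at every __ex_table entry.
 */
#ifdef CONFIG_64BIT
#define ASM_ULONG_INSN	.dword
#else
#define ASM_ULONG_INSN	.word
#endif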
diff --git a/arch/parisc/lib/memcpy.c b/arch/parisc/lib/memcpy.c
index 5575e41f9d60..2c43ebe99a9c 100644
--- a/arch/parisc/lib/memcpy.c
+++ b/arch/parisc/lib/memcpy.c
@@ -96,30 +96,18 @@ DECLARE_PER_CPU(struct exception_data, exception_data);
 #define DPRINTF(fmt, args...)
 #endif
 
-#ifndef __LP64__
-#define EXC_WORD ".word"
-#else
-#define EXC_WORD ".dword"
-#endif
-
 #define def_load_ai_insn(_insn,_sz,_tt,_s,_a,_t,_e) \
 	__asm__ __volatile__ ( \
-	"1:\t" #_insn ",ma " #_sz "(" _s ",%1), %0\n" \
-	"\t.section __ex_table,\"aw\"\n" \
-	"\t" EXC_WORD "\t1b\n" \
-	"\t" EXC_WORD "\t" #_e "\n" \
-	"\t.previous\n" \
+	"1:\t" #_insn ",ma " #_sz "(" _s ",%1), %0\n\t" \
+	ASM_EXCEPTIONTABLE_ENTRY(1b,_e) \
 	: _tt(_t), "+r"(_a) \
 	: \
 	: "r8")
 
 #define def_store_ai_insn(_insn,_sz,_tt,_s,_a,_t,_e) \
 	__asm__ __volatile__ ( \
-	"1:\t" #_insn ",ma %1, " #_sz "(" _s ",%0)\n" \
-	"\t.section __ex_table,\"aw\"\n" \
-	"\t" EXC_WORD "\t1b\n" \
-	"\t" EXC_WORD "\t" #_e "\n" \
-	"\t.previous\n" \
+	"1:\t" #_insn ",ma %1, " #_sz "(" _s ",%0)\n\t" \
+	ASM_EXCEPTIONTABLE_ENTRY(1b,_e) \
 	: "+r"(_a) \
 	: _tt(_t) \
 	: "r8")
@@ -133,22 +121,16 @@ DECLARE_PER_CPU(struct exception_data, exception_data);
 
 #define def_load_insn(_insn,_tt,_s,_o,_a,_t,_e) \
 	__asm__ __volatile__ ( \
-	"1:\t" #_insn " " #_o "(" _s ",%1), %0\n" \
-	"\t.section __ex_table,\"aw\"\n" \
-	"\t" EXC_WORD "\t1b\n" \
-	"\t" EXC_WORD "\t" #_e "\n" \
-	"\t.previous\n" \
+	"1:\t" #_insn " " #_o "(" _s ",%1), %0\n\t" \
+	ASM_EXCEPTIONTABLE_ENTRY(1b,_e) \
 	: _tt(_t) \
 	: "r"(_a) \
 	: "r8")
 
 #define def_store_insn(_insn,_tt,_s,_t,_o,_a,_e) \
 	__asm__ __volatile__ ( \
-	"1:\t" #_insn " %0, " #_o "(" _s ",%1)\n" \
-	"\t.section __ex_table,\"aw\"\n" \
-	"\t" EXC_WORD "\t1b\n" \
-	"\t" EXC_WORD "\t" #_e "\n" \
-	"\t.previous\n" \
+	"1:\t" #_insn " %0, " #_o "(" _s ",%1)\n\t" \
+	ASM_EXCEPTIONTABLE_ENTRY(1b,_e) \
 	: \
 	: _tt(_t), "r"(_a) \
 	: "r8")
@@ -167,8 +149,8 @@ extern inline void prefetch_dst(const void *addr)
 	__asm__("ldd 0(" d_space ",%0), %%r0" : : "r" (addr));
 }
 #else
-#define prefetch_src(addr)
-#define prefetch_dst(addr)
+#define prefetch_src(addr) do { } while(0)
+#define prefetch_dst(addr) do { } while(0)
 #endif
 
 /* Copy from a not-aligned src to an aligned dst, using shifts. Handles 4 words
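(For context, not part of the patch: ASM_EXCEPTIONTABLE_ENTRY lives in the parisc headers and packages the open-coded .section __ex_table fragment that the removed lines spelled out by hand. A hedged sketch of the idea only; the real definition may differ, and ASM_EXC_WORD below is a stand-in name invented for this illustration, not a kernel macro:)

/* Hedged sketch: emit one __ex_table entry -- the faulting instruction
 * address and the fixup address -- as a single string usable inside C
 * inline assembly, with the pointer-sized directive chosen once.
 */
#ifdef CONFIG_64BIT
#define ASM_EXC_WORD	".dword"
#else
#define ASM_EXC_WORD	".word"
#endif

#define ASM_EXCEPTIONTABLE_ENTRY(fault_addr, except_addr)		\
	"\t.section __ex_table,\"aw\"\n"				\
	"\t" ASM_EXC_WORD " " #fault_addr "," #except_addr "\n"	\
	"\t.previous\n"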