author		Helge Deller <deller@gmx.de>			2006-12-19 16:33:58 -0500
committer	Kyle McMartin <kyle@athena.road.mcmartin.ca>	2007-02-17 00:54:42 -0500
commit		94a1981df056e349b926164915894436acdb8dd5 (patch)
tree		05f60dfffd2afb488f8959cf43840adb3e96b159
parent		e38287647467188d8b8d1adadc6ab26267758151 (diff)
[PARISC] whitespace cleanups and unify 32/64bit user-access assembler inlines
Signed-off-by: Helge Deller <deller@gmx.de>
Signed-off-by: Kyle McMartin <kyle@parisc-linux.org>
-rw-r--r--	arch/parisc/mm/fault.c			4
-rw-r--r--	include/asm-parisc/uaccess.h		86
2 files changed, 27 insertions, 63 deletions
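The mechanism behind the unification, for readers skimming the diff: a single ASM_WORD_INSN macro carries the width-specific assembler directive (".word\t" on narrow kernels, ".dword\t" on wide ones), and C string-literal concatenation splices it into the __ex_table entry of each inline-asm template, so the duplicated 32-bit and 64-bit copies of the accessor macros collapse into one. The stand-alone demo below is not part of the patch; main() and the printed fragment are illustrative only, showing the same concatenation the unified __get_user_asm()/__put_user_asm() templates perform.

/* Illustration only, not kernel code: how the ASM_WORD_INSN string from
 * the patch is spliced into an __ex_table entry by plain C string-literal
 * concatenation. */
#include <stdio.h>

#if !defined(__LP64__)
#define ASM_WORD_INSN	".word\t"	/* 32-bit kernel: 32-bit table entries */
#else
#define ASM_WORD_INSN	".dword\t"	/* 64-bit kernel: 64-bit table entries */
#endif

int main(void)
{
	/* The same fragment the unified accessor macros emit inside asm(). */
	const char *ex_entry =
		"\t.section __ex_table,\"aw\"\n"
		"\t" ASM_WORD_INSN "1b,fixup_get_user_skip_1\n"
		"\t.previous";

	puts(ex_entry);
	return 0;
}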
diff --git a/arch/parisc/mm/fault.c b/arch/parisc/mm/fault.c
index 641f9c920eee..f6f67554c623 100644
--- a/arch/parisc/mm/fault.c
+++ b/arch/parisc/mm/fault.c
@@ -24,10 +24,6 @@
 /* dumped to the console via printk) */
 
 
-/* Defines for parisc_acctyp() */
-#define READ 0
-#define WRITE 1
-
 /* Various important other fields */
 #define bit22set(x)	(x & 0x00000200)
 #define bits23_25set(x)	(x & 0x000001c0)
diff --git a/include/asm-parisc/uaccess.h b/include/asm-parisc/uaccess.h
index 2e87e823600a..98c36dcadf80 100644
--- a/include/asm-parisc/uaccess.h
+++ b/include/asm-parisc/uaccess.h
@@ -42,16 +42,18 @@ static inline long access_ok(int type, const void __user * addr,
 #define put_user __put_user
 #define get_user __get_user
 
-#if BITS_PER_LONG == 32
+#if !defined(__LP64__)
 #define LDD_KERNEL(ptr)		__get_kernel_bad();
 #define LDD_USER(ptr)		__get_user_bad();
 #define STD_KERNEL(x, ptr)	__put_kernel_asm64(x,ptr)
 #define STD_USER(x, ptr)	__put_user_asm64(x,ptr)
+#define ASM_WORD_INSN		".word\t"
 #else
 #define LDD_KERNEL(ptr)		__get_kernel_asm("ldd",ptr)
 #define LDD_USER(ptr)		__get_user_asm("ldd",ptr)
 #define STD_KERNEL(x, ptr)	__put_kernel_asm("std",x,ptr)
 #define STD_USER(x, ptr)	__put_user_asm("std",x,ptr)
+#define ASM_WORD_INSN		".dword\t"
 #endif
 
 /*
@@ -103,11 +105,11 @@ struct exception_data {
 	__gu_err; \
 })
 
-#ifdef __LP64__
 #define __get_kernel_asm(ldx,ptr) \
 	__asm__("\n1:\t" ldx "\t0(%2),%0\n" \
 		"\t.section __ex_table,\"aw\"\n" \
-		"\t.dword\t1b,fixup_get_user_skip_1\n" \
+		"\t" ASM_WORD_INSN \
+			"1b,fixup_get_user_skip_1\n" \
 		"\t.previous" \
 		: "=r"(__gu_val), "=r"(__gu_err) \
 		: "r"(ptr), "1"(__gu_err) \
@@ -116,30 +118,12 @@ struct exception_data {
 #define __get_user_asm(ldx,ptr) \
 	__asm__("\n1:\t" ldx "\t0(%%sr3,%2),%0\n" \
 		"\t.section __ex_table,\"aw\"\n" \
-		"\t.dword\t1b,fixup_get_user_skip_1\n" \
+		"\t" ASM_WORD_INSN \
+			"1b,fixup_get_user_skip_1\n" \
 		"\t.previous" \
 		: "=r"(__gu_val), "=r"(__gu_err) \
 		: "r"(ptr), "1"(__gu_err) \
 		: "r1");
-#else
-#define __get_kernel_asm(ldx,ptr) \
-	__asm__("\n1:\t" ldx "\t0(%2),%0\n" \
-		"\t.section __ex_table,\"aw\"\n" \
-		"\t.word\t1b,fixup_get_user_skip_1\n" \
-		"\t.previous" \
-		: "=r"(__gu_val), "=r"(__gu_err) \
-		: "r"(ptr), "1"(__gu_err) \
-		: "r1");
-
-#define __get_user_asm(ldx,ptr) \
-	__asm__("\n1:\t" ldx "\t0(%%sr3,%2),%0\n" \
-		"\t.section __ex_table,\"aw\"\n" \
-		"\t.word\t1b,fixup_get_user_skip_1\n" \
-		"\t.previous" \
-		: "=r"(__gu_val), "=r"(__gu_err) \
-		: "r"(ptr), "1"(__gu_err) \
-		: "r1");
-#endif /* !__LP64__ */
 
 #define __put_user(x,ptr) \
 ({ \
@@ -178,12 +162,12 @@ struct exception_data {
  * r8/r9 are already listed as err/val.
  */
 
-#ifdef __LP64__
 #define __put_kernel_asm(stx,x,ptr) \
 	__asm__ __volatile__ ( \
 		"\n1:\t" stx "\t%2,0(%1)\n" \
 		"\t.section __ex_table,\"aw\"\n" \
-		"\t.dword\t1b,fixup_put_user_skip_1\n" \
+		"\t" ASM_WORD_INSN \
+			"1b,fixup_put_user_skip_1\n" \
 		"\t.previous" \
 		: "=r"(__pu_err) \
 		: "r"(ptr), "r"(x), "0"(__pu_err) \
@@ -193,36 +177,20 @@ struct exception_data {
 	__asm__ __volatile__ ( \
 		"\n1:\t" stx "\t%2,0(%%sr3,%1)\n" \
 		"\t.section __ex_table,\"aw\"\n" \
-		"\t.dword\t1b,fixup_put_user_skip_1\n" \
+		"\t" ASM_WORD_INSN \
+			"1b,fixup_put_user_skip_1\n" \
 		"\t.previous" \
 		: "=r"(__pu_err) \
 		: "r"(ptr), "r"(x), "0"(__pu_err) \
 		: "r1")
-#else
-#define __put_kernel_asm(stx,x,ptr) \
-	__asm__ __volatile__ ( \
-		"\n1:\t" stx "\t%2,0(%1)\n" \
-		"\t.section __ex_table,\"aw\"\n" \
-		"\t.word\t1b,fixup_put_user_skip_1\n" \
-		"\t.previous" \
-		: "=r"(__pu_err) \
-		: "r"(ptr), "r"(x), "0"(__pu_err) \
-		: "r1")
 
-#define __put_user_asm(stx,x,ptr) \
-	__asm__ __volatile__ ( \
-		"\n1:\t" stx "\t%2,0(%%sr3,%1)\n" \
-		"\t.section __ex_table,\"aw\"\n" \
-		"\t.word\t1b,fixup_put_user_skip_1\n" \
-		"\t.previous" \
-		: "=r"(__pu_err) \
-		: "r"(ptr), "r"(x), "0"(__pu_err) \
-		: "r1")
 
+#if !defined(__LP64__)
+
 #define __put_kernel_asm64(__val,ptr) do { \
 	u64 __val64 = (u64)(__val); \
 	u32 hi = (__val64) >> 32; \
 	u32 lo = (__val64) & 0xffffffff; \
 	__asm__ __volatile__ ( \
 		"\n1:\tstw %2,0(%1)\n" \
 		"\n2:\tstw %3,4(%1)\n" \
@@ -235,10 +203,10 @@ struct exception_data {
 		: "r1"); \
 } while (0)
 
-#define __put_user_asm64(__val,ptr) do { \
-	u64 __val64 = (u64)__val; \
-	u32 hi = (__val64) >> 32; \
-	u32 lo = (__val64) & 0xffffffff; \
+#define __put_user_asm64(__val,ptr) do { \
+	u64 __val64 = (u64)(__val); \
+	u32 hi = (__val64) >> 32; \
+	u32 lo = (__val64) & 0xffffffff; \
 	__asm__ __volatile__ ( \
 		"\n1:\tstw %2,0(%%sr3,%1)\n" \
 		"\n2:\tstw %3,4(%%sr3,%1)\n" \
@@ -251,7 +219,7 @@ struct exception_data {
 		: "r1"); \
 } while (0)
 
-#endif /* !__LP64__ */
+#endif /* !defined(__LP64__) */
 
 
 /*
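A hedged usage sketch, not taken from the patch (the helper name is made up): with the unified macros, a put_user() of an 8-byte value still resolves through STD_USER(), which is __put_user_asm("std",...) on 64-bit kernels and __put_user_asm64(), the stw/stw pair kept under #if !defined(__LP64__), on 32-bit kernels.

/* Hypothetical caller, for illustration only. */
#include <linux/types.h>
#include <asm/uaccess.h>

static int copy_stamp_to_user(u64 stamp, u64 __user *uptr)
{
	/* sizeof(*uptr) == 8, so this goes through STD_USER(): one "std"
	 * on a 64-bit kernel, or the two-stw __put_user_asm64() sequence
	 * on a 32-bit kernel, each store covered by an __ex_table entry
	 * pointing at a fixup_put_user_skip_* handler.  Returns 0 or
	 * -EFAULT. */
	return put_user(stamp, uptr);
}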