author	Tom Musta <tommusta@gmail.com>	2013-10-18 15:42:08 -0400
committer	Benjamin Herrenschmidt <benh@kernel.crashing.org>	2013-10-30 01:01:30 -0400
commit	6506b4718bb59c5d4e59235b81b5e13ea5d3c49a (patch)
tree	24a45ddb2ba911750bf3c2ac9b649560fb65342e /arch
parent	630c8a5fc9fb2f3541652b65f23630757d304cc9 (diff)
powerpc: Fix Unaligned Loads and Stores
This patch modifies the unaligned access routines of the sstep.c module so that they properly reverse the bytes of storage operands in the little endian kernel. This is implemented by breaking an unaligned little endian access into a combination of single byte accesses plus an overall byte reversal operation.

Signed-off-by: Tom Musta <tmusta@gmail.com>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
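For illustration, here is a minimal standalone C sketch of the technique, not the kernel code itself: byterev_4() is modeled on the helper of the same name in sstep.c, read_unaligned_le32() is a hypothetical name, and the 2- and 8-byte cases are omitted for brevity.

#include <stdint.h>

/* Modeled on the byterev_4() helper in sstep.c. */
static uint32_t byterev_4(uint32_t x)
{
	return ((x & 0xff) << 24) | ((x & 0xff00) << 8) |
	       ((x >> 8) & 0xff00) | ((x >> 24) & 0xff);
}

/* Hypothetical stand-in for the little-endian read path: fetch the
 * operand one byte at a time (c is forced to 1 in the patch),
 * accumulating most-significant-byte first, then byte-reverse once. */
static uint32_t read_unaligned_le32(const uint8_t *ea)
{
	uint32_t x = 0;
	int nb;

	for (nb = 4; nb > 0; nb--)	/* single byte accesses */
		x = (x << 8) + *ea++;

	return byterev_4(x);		/* overall byte reversal */
}

Reading bytes b0 b1 b2 b3 at ea accumulates x = b0<<24 | b1<<16 | b2<<8 | b3; the final byterev_4() turns that into b3<<24 | b2<<16 | b1<<8 | b0, the value a native little-endian 4-byte load would have produced.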
Diffstat (limited to 'arch')
-rw-r--r--	arch/powerpc/lib/sstep.c	45
1 file changed, 45 insertions(+), 0 deletions(-)
diff --git a/arch/powerpc/lib/sstep.c b/arch/powerpc/lib/sstep.c
index b1faa1593c90..0121d2140ab9 100644
--- a/arch/powerpc/lib/sstep.c
+++ b/arch/powerpc/lib/sstep.c
@@ -212,11 +212,19 @@ static int __kprobes read_mem_unaligned(unsigned long *dest, unsigned long ea,
 {
 	int err;
 	unsigned long x, b, c;
+#ifdef __LITTLE_ENDIAN__
+	int len = nb; /* save a copy of the length for byte reversal */
+#endif
 
 	/* unaligned, do this in pieces */
 	x = 0;
 	for (; nb > 0; nb -= c) {
+#ifdef __LITTLE_ENDIAN__
+		c = 1;
+#endif
+#ifdef __BIG_ENDIAN__
 		c = max_align(ea);
+#endif
 		if (c > nb)
 			c = max_align(nb);
 		err = read_mem_aligned(&b, ea, c);
@@ -225,7 +233,24 @@ static int __kprobes read_mem_unaligned(unsigned long *dest, unsigned long ea,
 		x = (x << (8 * c)) + b;
 		ea += c;
 	}
+#ifdef __LITTLE_ENDIAN__
+	switch (len) {
+	case 2:
+		*dest = byterev_2(x);
+		break;
+	case 4:
+		*dest = byterev_4(x);
+		break;
+#ifdef __powerpc64__
+	case 8:
+		*dest = byterev_8(x);
+		break;
+#endif
+	}
+#endif
+#ifdef __BIG_ENDIAN__
 	*dest = x;
+#endif
 	return 0;
 }
 
@@ -273,9 +298,29 @@ static int __kprobes write_mem_unaligned(unsigned long val, unsigned long ea,
 	int err;
 	unsigned long c;
 
+#ifdef __LITTLE_ENDIAN__
+	switch (nb) {
+	case 2:
+		val = byterev_2(val);
+		break;
+	case 4:
+		val = byterev_4(val);
+		break;
+#ifdef __powerpc64__
+	case 8:
+		val = byterev_8(val);
+		break;
+#endif
+	}
+#endif
 	/* unaligned or little-endian, do this in pieces */
 	for (; nb > 0; nb -= c) {
+#ifdef __LITTLE_ENDIAN__
+		c = 1;
+#endif
+#ifdef __BIG_ENDIAN__
 		c = max_align(ea);
+#endif
 		if (c > nb)
 			c = max_align(nb);
 		err = write_mem_aligned(val >> (nb - c) * 8, ea, c);
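Note the symmetry between the two paths: on little-endian, read_mem_unaligned() accumulates single bytes most-significant-first and byte-reverses the result once at the end, while write_mem_unaligned() byte-reverses val up front. The expression val >> (nb - c) * 8 always stores the most significant remaining byte at the lowest address, so the pre-reversed value lands in memory in little-endian order.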