aboutsummaryrefslogtreecommitdiffstats
path: root/arch/xtensa/kernel
diff options
context:
space:
mode:
Diffstat (limited to 'arch/xtensa/kernel')
-rw-r--r--  arch/xtensa/kernel/align.S | 51
1 file changed, 19 insertions(+), 32 deletions(-)
diff --git a/arch/xtensa/kernel/align.S b/arch/xtensa/kernel/align.S
index 904f32f05c09..2c7c13d8e91a 100644
--- a/arch/xtensa/kernel/align.S
+++ b/arch/xtensa/kernel/align.S
@@ -277,18 +277,6 @@ ENTRY(fast_unaligned)
277 /* Set target register. */ 277 /* Set target register. */
278 278
279 1:                                                  279 1:
280
281#if XCHAL_HAVE_LOOPS
282 rsr a5, lend # check if we reached LEND
283 bne a7, a5, 1f
284 rsr a5, lcount # and LCOUNT != 0
285 beqz a5, 1f
286 addi a5, a5, -1 # decrement LCOUNT and set
287 rsr a7, lbeg # set PC to LBEGIN
288 wsr a5, lcount
289#endif
290
291 1:	wsr	a7, epc1	# skip load instruction
292 extui a4, a4, INSN_T, 4 # extract target register 280 extui a4, a4, INSN_T, 4 # extract target register
293 movi a5, .Lload_table 281 movi a5, .Lload_table
294 addx8 a4, a4, a5 282 addx8 a4, a4, a5
@@ -358,17 +346,6 @@ ENTRY(fast_unaligned)
358 /* Get memory address */ 346 /* Get memory address */
359 347
360 1:                                                  348 1:
361#if XCHAL_HAVE_LOOPS
362 rsr a4, lend # check if we reached LEND
363 bne a7, a4, 1f
364 rsr a4, lcount # and LCOUNT != 0
365 beqz a4, 1f
366 addi a4, a4, -1 # decrement LCOUNT and set
367 rsr a7, lbeg # set PC to LBEGIN
368 wsr a4, lcount
369#endif
370
371 1:	wsr	a7, epc1	# skip store instruction
372 movi a4, ~3 349 movi a4, ~3
373 and a4, a4, a8 # align memory address 350 and a4, a4, a8 # align memory address
374 351
@@ -380,25 +357,25 @@ ENTRY(fast_unaligned)
380#endif 357#endif
381 358
382 __ssa8r a8 359 __ssa8r a8
383 __src_b a7, a5, a6 # lo-mask F..F0..0 (BE) 0..0F..F (LE) 360 __src_b a8, a5, a6 # lo-mask F..F0..0 (BE) 0..0F..F (LE)
384 __src_b a6, a6, a5 # hi-mask 0..0F..F (BE) F..F0..0 (LE) 361 __src_b a6, a6, a5 # hi-mask 0..0F..F (BE) F..F0..0 (LE)
385#ifdef UNALIGNED_USER_EXCEPTION 362#ifdef UNALIGNED_USER_EXCEPTION
386 l32e a5, a4, -8 363 l32e a5, a4, -8
387#else 364#else
388 l32i a5, a4, 0 # load lower address word 365 l32i a5, a4, 0 # load lower address word
389#endif 366#endif
390 and a5, a5, a7 # mask 367 and a5, a5, a8 # mask
391 __sh a7, a3 # shift value 368 __sh a8, a3 # shift value
392 or a5, a5, a7 # or with original value 369 or a5, a5, a8 # or with original value
393#ifdef UNALIGNED_USER_EXCEPTION 370#ifdef UNALIGNED_USER_EXCEPTION
394 s32e a5, a4, -8 371 s32e a5, a4, -8
395 l32e a7, a4, -4 372 l32e a8, a4, -4
396#else 373#else
397 s32i a5, a4, 0 # store 374 s32i a5, a4, 0 # store
398 l32i a7, a4, 4 # same for upper address word 375 l32i a8, a4, 4 # same for upper address word
399#endif 376#endif
400 __sl a5, a3 377 __sl a5, a3
401 and a6, a7, a6 378 and a6, a8, a6
402 or a6, a6, a5 379 or a6, a6, a5
403#ifdef UNALIGNED_USER_EXCEPTION 380#ifdef UNALIGNED_USER_EXCEPTION
404 s32e a6, a4, -4 381 s32e a6, a4, -4
@@ -406,9 +383,19 @@ ENTRY(fast_unaligned)
406 s32i a6, a4, 4 383 s32i a6, a4, 4
407#endif 384#endif
408 385
409 /* Done. restore stack and return */
410
411.Lexit: 386.Lexit:
387#if XCHAL_HAVE_LOOPS
388 rsr a4, lend # check if we reached LEND
389 bne a7, a4, 1f
390 rsr a4, lcount # and LCOUNT != 0
391 beqz a4, 1f
392 addi a4, a4, -1 # decrement LCOUNT and set
393 rsr a7, lbeg # set PC to LBEGIN
394 wsr a4, lcount
395#endif
396
397 1:	wsr	a7, epc1	# skip emulated instruction
398
412 movi a4, 0 399 movi a4, 0
413 rsr a3, excsave1 400 rsr a3, excsave1
414 s32i a4, a3, EXC_TABLE_FIXUP 401 s32i a4, a3, EXC_TABLE_FIXUP