Diffstat (limited to 'arch/xtensa/kernel/align.S')
-rw-r--r--	arch/xtensa/kernel/align.S	38
1 file changed, 19 insertions(+), 19 deletions(-)
diff --git a/arch/xtensa/kernel/align.S b/arch/xtensa/kernel/align.S
index 33d6e9d2e83c..934ae58e2c79 100644
--- a/arch/xtensa/kernel/align.S
+++ b/arch/xtensa/kernel/align.S
@@ -170,15 +170,15 @@ ENTRY(fast_unaligned)
 	s32i	a7, a2, PT_AREG7
 	s32i	a8, a2, PT_AREG8
 
-	rsr	a0, DEPC
-	xsr	a3, EXCSAVE_1
+	rsr	a0, depc
+	xsr	a3, excsave1
 	s32i	a0, a2, PT_AREG2
 	s32i	a3, a2, PT_AREG3
 
 	/* Keep value of SAR in a0 */
 
-	rsr	a0, SAR
-	rsr	a8, EXCVADDR	# load unaligned memory address
+	rsr	a0, sar
+	rsr	a8, excvaddr	# load unaligned memory address
 
 	/* Now, identify one of the following load/store instructions.
 	 *
@@ -197,7 +197,7 @@ ENTRY(fast_unaligned)
 
 	/* Extract the instruction that caused the unaligned access. */
 
-	rsr	a7, EPC_1	# load exception address
+	rsr	a7, epc1	# load exception address
 	movi	a3, ~3
 	and	a3, a3, a7	# mask lower bits
 
@@ -275,16 +275,16 @@ ENTRY(fast_unaligned)
 1:
 
 #if XCHAL_HAVE_LOOPS
-	rsr	a5, LEND	# check if we reached LEND
+	rsr	a5, lend	# check if we reached LEND
 	bne	a7, a5, 1f
-	rsr	a5, LCOUNT	# and LCOUNT != 0
+	rsr	a5, lcount	# and LCOUNT != 0
 	beqz	a5, 1f
 	addi	a5, a5, -1	# decrement LCOUNT and set
-	rsr	a7, LBEG	# set PC to LBEGIN
-	wsr	a5, LCOUNT
+	rsr	a7, lbeg	# set PC to LBEGIN
+	wsr	a5, lcount
 #endif
 
-1:	wsr	a7, EPC_1	# skip load instruction
+1:	wsr	a7, epc1	# skip load instruction
 	extui	a4, a4, INSN_T, 4	# extract target register
 	movi	a5, .Lload_table
 	addx8	a4, a4, a5
@@ -355,16 +355,16 @@ ENTRY(fast_unaligned)
 
 1:
 #if XCHAL_HAVE_LOOPS
-	rsr	a4, LEND	# check if we reached LEND
+	rsr	a4, lend	# check if we reached LEND
 	bne	a7, a4, 1f
-	rsr	a4, LCOUNT	# and LCOUNT != 0
+	rsr	a4, lcount	# and LCOUNT != 0
 	beqz	a4, 1f
 	addi	a4, a4, -1	# decrement LCOUNT and set
-	rsr	a7, LBEG	# set PC to LBEGIN
-	wsr	a4, LCOUNT
+	rsr	a7, lbeg	# set PC to LBEGIN
+	wsr	a4, lcount
 #endif
 
-1:	wsr	a7, EPC_1	# skip store instruction
+1:	wsr	a7, epc1	# skip store instruction
 	movi	a4, ~3
 	and	a4, a4, a8	# align memory address
 
@@ -406,7 +406,7 @@ ENTRY(fast_unaligned)
 
 .Lexit:
 	movi	a4, 0
-	rsr	a3, EXCSAVE_1
+	rsr	a3, excsave1
 	s32i	a4, a3, EXC_TABLE_FIXUP
 
 	/* Restore working register */
@@ -420,7 +420,7 @@ ENTRY(fast_unaligned)
 
 	/* restore SAR and return */
 
-	wsr	a0, SAR
+	wsr	a0, sar
 	l32i	a0, a2, PT_AREG0
 	l32i	a2, a2, PT_AREG2
 	rfe
@@ -438,10 +438,10 @@ ENTRY(fast_unaligned)
 	l32i	a6, a2, PT_AREG6
 	l32i	a5, a2, PT_AREG5
 	l32i	a4, a2, PT_AREG4
-	wsr	a0, SAR
+	wsr	a0, sar
 	mov	a1, a2
 
-	rsr	a0, PS
+	rsr	a0, ps
 	bbsi.l	a2, PS_UM_BIT, 1f	# jump if user mode
 
 	movi	a0, _kernel_exception
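Every hunk above follows the same mechanical pattern: rsr/wsr/xsr operands that named special registers through the legacy uppercase spellings (SAR, DEPC, EPC_1, EXCSAVE_1, ...) now use the lowercase names (sar, depc, epc1, excsave1, ...) that the assembler defines directly. A minimal sketch of the idiom, not taken from this file and with register choices that are purely illustrative, assuming an assembler that accepts the lowercase special-register names:

	/* Hypothetical example: save SAR, do a variable left shift,
	 * restore it. "sar" is the lowercase name of the shift-amount
	 * register that the old code spelled as the uppercase SAR.
	 */
	rsr	a5, sar		# save caller's shift amount
	ssl	a6		# set shift-left amount from a6 (clobbers SAR)
	sll	a7, a8		# a7 = a8 shifted left by that amount
	wsr	a5, sar		# restore SAR for the caller

The diffstat (19 insertions, 19 deletions) is consistent with a one-for-one rename: each changed line differs only in the case of the register name, so the generated code should be unchanged.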