Diffstat (limited to 'arch/xtensa/kernel/align.S')
-rw-r--r--	arch/xtensa/kernel/align.S	| 128
1 file changed, 81 insertions(+), 47 deletions(-)
diff --git a/arch/xtensa/kernel/align.S b/arch/xtensa/kernel/align.S
index d4cef6039a5c..890004af03a9 100644
--- a/arch/xtensa/kernel/align.S
+++ b/arch/xtensa/kernel/align.S
@@ -8,6 +8,7 @@
  * this archive for more details.
  *
  * Copyright (C) 2001 - 2005 Tensilica, Inc.
+ * Copyright (C) 2014 Cadence Design Systems Inc.
  *
  * Rewritten by Chris Zankel <chris@zankel.net>
  *
@@ -174,6 +175,10 @@ ENTRY(fast_unaligned)
 	s32i	a0, a2, PT_AREG2
 	s32i	a3, a2, PT_AREG3
 
+	rsr	a3, excsave1
+	movi	a4, fast_unaligned_fixup
+	s32i	a4, a3, EXC_TABLE_FIXUP
+
 	/* Keep value of SAR in a0 */
 
 	rsr	a0, sar
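The handler now arms EXC_TABLE_FIXUP on entry, so a fault taken while the emulation itself touches memory (for example an l32e/s32e that misses) unwinds through fast_unaligned_fixup rather than dying in a nested trap; the slot is cleared again on the exit and bailout paths below. A minimal C model of this arm/recover/disarm pattern; the struct and function names are illustrative, not the kernel's C API:

    #include <stdio.h>

    struct exc_table_model {
        void (*fixup)(void);            /* models the EXC_TABLE_FIXUP slot */
    };

    static struct exc_table_model exc_table;  /* models the excsave1 area */

    static void fast_unaligned_fixup_model(void)
    {
        puts("fault during emulation: unwinding to the default handler");
    }

    static void emulate_unaligned_access(int fault)
    {
        exc_table.fixup = fast_unaligned_fixup_model;  /* arm the fixup */
        if (fault && exc_table.fixup)
            exc_table.fixup();          /* a nested fault would land here */
        else
            puts("emulation completed");
        exc_table.fixup = NULL;         /* disarm on the exit path */
    }

    int main(void)
    {
        emulate_unaligned_access(0);
        emulate_unaligned_access(1);
        return 0;
    }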
@@ -225,10 +230,6 @@ ENTRY(fast_unaligned)
 	addx8	a5, a6, a5
 	jx	a5			# jump into table
 
-	/* Invalid instruction, CRITICAL! */
-.Linvalid_instruction_load:
-	j	.Linvalid_instruction
-
 	/* Load: Load memory address. */
 
 .Lload:	movi	a3, ~3
@@ -272,18 +273,6 @@ ENTRY(fast_unaligned)
 	/* Set target register. */
 
 1:
-
-#if XCHAL_HAVE_LOOPS
-	rsr	a5, lend		# check if we reached LEND
-	bne	a7, a5, 1f
-	rsr	a5, lcount		# and LCOUNT != 0
-	beqz	a5, 1f
-	addi	a5, a5, -1		# decrement LCOUNT and set
-	rsr	a7, lbeg		# set PC to LBEGIN
-	wsr	a5, lcount
-#endif
-
-1:	wsr	a7, epc1		# skip load instruction
 	extui	a4, a4, INSN_T, 4	# extract target register
 	movi	a5, .Lload_table
 	addx8	a4, a4, a5
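The load path now falls straight through to target-register decode; the loop and EPC1 bookkeeping removed here reappears once, in the shared .Lexit further down. The extui picks the 4-bit 't' field out of the instruction word. In C terms, assuming INSN_T is the field's bit offset (4 on little-endian cores) and with an illustrative opcode value:

    #include <stdint.h>
    #include <stdio.h>

    #define INSN_T 4   /* assumed 't' field offset, LE instruction order */

    static unsigned insn_t_field(uint32_t insn)
    {
        return (insn >> INSN_T) & 0xf;   /* same as extui ..., INSN_T, 4 */
    }

    int main(void)
    {
        /* 0x2352 encodes l32i a5, a3, 0 (t = 5) on LE cores; illustrative */
        printf("target register a%u\n", insn_t_field(0x2352));
        return 0;
    }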
@@ -326,6 +315,35 @@ ENTRY(fast_unaligned)
 	mov	a3, a14		; _j 1f; .align 8
 	mov	a3, a15		; _j 1f; .align 8
 
+	/* We cannot handle this exception. */
+
+	.extern _kernel_exception
+.Linvalid_instruction_load:
+.Linvalid_instruction_store:
+
+	movi	a4, 0
+	rsr	a3, excsave1
+	s32i	a4, a3, EXC_TABLE_FIXUP
+
+	/* Restore a4...a8 and SAR, set SP, and jump to default exception. */
+
+	l32i	a8, a2, PT_AREG8
+	l32i	a7, a2, PT_AREG7
+	l32i	a6, a2, PT_AREG6
+	l32i	a5, a2, PT_AREG5
+	l32i	a4, a2, PT_AREG4
+	wsr	a0, sar
+	mov	a1, a2
+
+	rsr	a0, ps
+	bbsi.l	a0, PS_UM_BIT, 2f	# jump if user mode
+
+	movi	a0, _kernel_exception
+	jx	a0
+
+2:	movi	a0, _user_exception
+	jx	a0
+
 1:	# a7: instruction pointer, a4: instruction, a3: value
 
 	movi	a6, 0		# mask: ffffffff:00000000
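The untreatable-opcode path now disarms the fixup slot first, restores the scratch registers, and forwards to the default kernel or user exception handler depending on PS.UM. A C sketch of that dispatch, assuming PS_UM_BIT is bit 5 as in the xtensa headers:

    #include <stdio.h>

    #define PS_UM_BIT 5   /* PS.UM: set while running in user mode */

    static void kernel_exception(void) { puts("-> _kernel_exception"); }
    static void user_exception(void)   { puts("-> _user_exception"); }

    /* Models the bbsi.l a0, PS_UM_BIT, 2f branch above. */
    static void take_default_exception(unsigned long ps)
    {
        if (ps & (1ul << PS_UM_BIT))
            user_exception();
        else
            kernel_exception();
    }

    int main(void)
    {
        take_default_exception(0);                 /* kernel-mode fault */
        take_default_exception(1ul << PS_UM_BIT);  /* user-mode fault */
        return 0;
    }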
@@ -353,17 +371,6 @@ ENTRY(fast_unaligned)
 	/* Get memory address */
 
 1:
-#if XCHAL_HAVE_LOOPS
-	rsr	a4, lend		# check if we reached LEND
-	bne	a7, a4, 1f
-	rsr	a4, lcount		# and LCOUNT != 0
-	beqz	a4, 1f
-	addi	a4, a4, -1		# decrement LCOUNT and set
-	rsr	a7, lbeg		# set PC to LBEGIN
-	wsr	a4, lcount
-#endif
-
-1:	wsr	a7, epc1		# skip store instruction
 	movi	a4, ~3
 	and	a4, a4, a8	# align memory address
 
@@ -375,25 +382,25 @@ ENTRY(fast_unaligned)
 #endif
 
 	__ssa8r	a8
-	__src_b	a7, a5, a6	# lo-mask F..F0..0 (BE) 0..0F..F (LE)
+	__src_b	a8, a5, a6	# lo-mask F..F0..0 (BE) 0..0F..F (LE)
 	__src_b	a6, a6, a5	# hi-mask 0..0F..F (BE) F..F0..0 (LE)
 #ifdef UNALIGNED_USER_EXCEPTION
 	l32e	a5, a4, -8
 #else
 	l32i	a5, a4, 0	# load lower address word
 #endif
-	and	a5, a5, a7	# mask
-	__sh	a7, a3		# shift value
-	or	a5, a5, a7	# or with original value
+	and	a5, a5, a8	# mask
+	__sh	a8, a3		# shift value
+	or	a5, a5, a8	# or with original value
 #ifdef UNALIGNED_USER_EXCEPTION
 	s32e	a5, a4, -8
-	l32e	a7, a4, -4
+	l32e	a8, a4, -4
 #else
 	s32i	a5, a4, 0	# store
-	l32i	a7, a4, 4	# same for upper address word
+	l32i	a8, a4, 4	# same for upper address word
 #endif
 	__sl	a5, a3
-	and	a6, a7, a6
+	and	a6, a8, a6
 	or	a6, a6, a5
 #ifdef UNALIGNED_USER_EXCEPTION
 	s32e	a6, a4, -4
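The store is emulated as a read-modify-write of the two aligned words the access straddles; renaming the scratch register from a7 to a8 frees a7 to carry the next instruction pointer all the way to the shared .Lexit. What the masked sequence computes, as a C sketch for a little-endian core (the kernel code handles both endiannesses via __src_b/__sh/__sl):

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Emulate an unaligned 32-bit store at mem+addr (addr % 4 != 0)
     * using only aligned word accesses. */
    static void emulated_store32(uint8_t *mem, uint32_t addr, uint32_t value)
    {
        uint32_t base = addr & ~3u;        /* movi a4, ~3; and a4, a4, a8 */
        unsigned shift = (addr & 3u) * 8;  /* 8, 16 or 24 when unaligned */
        uint32_t lo, hi;

        memcpy(&lo, mem + base, 4);        /* load lower address word */
        lo = (lo & ((1u << shift) - 1))    /* keep bytes below the access */
           | (value << shift);             /* or with shifted value */
        memcpy(mem + base, &lo, 4);        /* store */

        memcpy(&hi, mem + base + 4, 4);    /* same for upper address word */
        hi = (hi & (0xffffffffu << shift)) /* keep bytes above the access */
           | (value >> (32 - shift));
        memcpy(mem + base + 4, &hi, 4);
    }

    int main(void)
    {
        uint8_t mem[8] = { 0 };
        emulated_store32(mem, 1, 0x04030201);
        for (int i = 0; i < 8; i++)
            printf("%02x ", mem[i]);       /* 00 01 02 03 04 00 00 00 */
        putchar('\n');
        return 0;
    }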
@@ -401,9 +408,27 @@ ENTRY(fast_unaligned)
 	s32i	a6, a4, 4
 #endif
 
-	/* Done. restore stack and return */
-
 .Lexit:
+#if XCHAL_HAVE_LOOPS
+	rsr	a4, lend		# check if we reached LEND
+	bne	a7, a4, 1f
+	rsr	a4, lcount		# and LCOUNT != 0
+	beqz	a4, 1f
+	addi	a4, a4, -1		# decrement LCOUNT and set
+	rsr	a7, lbeg		# set PC to LBEGIN
+	wsr	a4, lcount
+#endif
+
+1:	wsr	a7, epc1		# skip emulated instruction
+
+	/* Update icount if we're single-stepping in userspace. */
+	rsr	a4, icountlevel
+	beqz	a4, 1f
+	bgeui	a4, LOCKLEVEL + 1, 1f
+	rsr	a4, icount
+	addi	a4, a4, 1
+	wsr	a4, icount
+1:
 	movi	a4, 0
 	rsr	a3, excsave1
 	s32i	a4, a3, EXC_TABLE_FIXUP
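The exit path is now shared: zero-overhead-loop wraparound and the EPC1 update happen once, after either a load or a store has been emulated, and ICOUNT is bumped so userspace single-stepping still observes exactly one step. A sketch of the next-PC logic, with illustrative field names:

    #include <stdint.h>
    #include <stdio.h>

    struct loop_regs {
        uint32_t lbeg, lend, lcount;    /* zero-overhead loop registers */
    };

    /* 'next' is the address just past the emulated insn (a7 above). */
    static uint32_t next_pc(uint32_t next, struct loop_regs *l)
    {
        if (next == l->lend && l->lcount) {  /* reached LEND, LCOUNT != 0 */
            l->lcount--;                     /* decrement LCOUNT and */
            return l->lbeg;                  /* set PC to LBEG */
        }
        return next;                         /* just skip the instruction */
    }

    int main(void)
    {
        struct loop_regs l = { .lbeg = 0x100, .lend = 0x120, .lcount = 2 };
        printf("0x%x\n", next_pc(0x120, &l));  /* 0x100, lcount now 1 */
        printf("0x%x\n", next_pc(0x110, &l));  /* 0x110, not at LEND */
        return 0;
    }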
@@ -424,31 +449,40 @@ ENTRY(fast_unaligned)
 	l32i	a2, a2, PT_AREG2
 	rfe
 
-	/* We cannot handle this exception. */
+ENDPROC(fast_unaligned)
 
-	.extern _kernel_exception
-.Linvalid_instruction_store:
-.Linvalid_instruction:
+ENTRY(fast_unaligned_fixup)
 
-	/* Restore a4...a8 and SAR, set SP, and jump to default exception. */
+	l32i	a2, a3, EXC_TABLE_DOUBLE_SAVE
+	wsr	a3, excsave1
 
 	l32i	a8, a2, PT_AREG8
 	l32i	a7, a2, PT_AREG7
 	l32i	a6, a2, PT_AREG6
 	l32i	a5, a2, PT_AREG5
 	l32i	a4, a2, PT_AREG4
+	l32i	a0, a2, PT_AREG2
+	xsr	a0, depc		# restore depc and a0
 	wsr	a0, sar
-	mov	a1, a2
+
+	rsr	a0, exccause
+	s32i	a0, a2, PT_DEPC		# mark as a regular exception
 
 	rsr	a0, ps
-	bbsi.l	a2, PS_UM_BIT, 1f	# jump if user mode
+	bbsi.l	a0, PS_UM_BIT, 1f	# jump if user mode
 
-	movi	a0, _kernel_exception
+	rsr	a0, exccause
+	addx4	a0, a0, a3		# find entry in table
+	l32i	a0, a0, EXC_TABLE_FAST_KERNEL	# load handler
+	l32i	a3, a2, PT_AREG3
 	jx	a0
-
-1:	movi	a0, _user_exception
+1:
+	rsr	a0, exccause
+	addx4	a0, a0, a3		# find entry in table
+	l32i	a0, a0, EXC_TABLE_FAST_USER	# load handler
+	l32i	a3, a2, PT_AREG3
 	jx	a0
 
-ENDPROC(fast_unaligned)
+ENDPROC(fast_unaligned_fixup)
 
 #endif /* XCHAL_UNALIGNED_LOAD_EXCEPTION || XCHAL_UNALIGNED_STORE_EXCEPTION */
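fast_unaligned_fixup restores the saved registers, marks the nested fault as a regular exception in PT_DEPC, and re-dispatches through the per-cause fast-handler table exactly as the first-level dispatcher would: entry = table + exccause * 4. A C model of that addx4-based dispatch; the struct layout and handler names are illustrative, not the kernel's actual table format:

    #include <stdio.h>

    typedef void (*exc_handler_t)(void);

    static void fast_user_handler(void)   { puts("fast user handler"); }
    static void fast_kernel_handler(void) { puts("fast kernel handler"); }

    #define NCAUSES 64

    struct exc_table_model {
        exc_handler_t fast_user[NCAUSES];     /* EXC_TABLE_FAST_USER */
        exc_handler_t fast_kernel[NCAUSES];   /* EXC_TABLE_FAST_KERNEL */
    };

    /* Models the addx4/l32i sequence at the end of fast_unaligned_fixup. */
    static void redispatch(struct exc_table_model *t, unsigned exccause,
                           int user)
    {
        exc_handler_t h = user ? t->fast_user[exccause]
                               : t->fast_kernel[exccause];
        h();    /* jx a0 */
    }

    int main(void)
    {
        struct exc_table_model t = {
            .fast_user   = { [3] = fast_user_handler },
            .fast_kernel = { [3] = fast_kernel_handler },
        };
        redispatch(&t, 3, 1);   /* cause number is illustrative */
        redispatch(&t, 3, 0);
        return 0;
    }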