author     Jean Pihet <j-pihet@ti.com>                  2010-12-18 10:49:57 -0500
committer  Kevin Hilman <khilman@deeprootsystems.com>   2010-12-21 17:45:59 -0500
commit     bb1c9034b3ce7f29d3d178a87b42b767611d6574 (patch)
tree       9ce16b76ed4d8045cc0fcac33665d70bf14dbde5
parent     8352129166b8270253a746f336a4429b349b023d (diff)
OMAP3: ASM sleep code format rework
Cosmetic fixes to the code:
- white spaces and tabs,
- alignment,
- comment rephrasing and typo fixes,
- multi-line comments
Tested on N900 and Beagleboard with full RET and OFF modes,
using cpuidle and suspend.
Signed-off-by: Jean Pihet <j-pihet@ti.com>
Acked-by: Santosh Shilimkar <santosh.shilimkar@ti.com>
Tested-by: Nishanth Menon <nm@ti.com>
Signed-off-by: Kevin Hilman <khilman@deeprootsystems.com>
-rw-r--r-- | arch/arm/mach-omap2/sleep34xx.S | 224 |
1 file changed, 117 insertions, 107 deletions
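The cosmetic categories in the commit message above refer to the usual kernel style for ARM assembly. A rough illustration of the style the patch moves the file toward is sketched below; the foo_sketch routine is made up for this note and is not code from the patch:

	.text
/*
 * Multi-line comments carry one leading asterisk per line and close
 * on a line of their own (illustrative sketch only; foo_sketch is a
 * hypothetical label, not a symbol from sleep34xx.S).
 */
ENTRY(foo_sketch)
	stmfd	sp!, {lr}		@ save registers on stack
	mov	r1, #0x1		@ per-instruction notes use '@', tab-aligned
	ldmfd	sp!, {pc}		@ restore regs and return

The same pattern is visible in the right-hand (new) column of the hunks below.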
diff --git a/arch/arm/mach-omap2/sleep34xx.S b/arch/arm/mach-omap2/sleep34xx.S
index 17dfe30db3a7..e3b5cd76c54c 100644
--- a/arch/arm/mach-omap2/sleep34xx.S
+++ b/arch/arm/mach-omap2/sleep34xx.S
@@ -1,6 +1,4 @@
1 | /* | 1 | /* |
2 | * linux/arch/arm/mach-omap2/sleep.S | ||
3 | * | ||
4 | * (C) Copyright 2007 | 2 | * (C) Copyright 2007 |
5 | * Texas Instruments | 3 | * Texas Instruments |
6 | * Karthik Dasu <karthik-dp@ti.com> | 4 | * Karthik Dasu <karthik-dp@ti.com> |
@@ -81,20 +79,20 @@
81 | .text | 79 | .text |
82 | /* Function call to get the restore pointer for resume from OFF */ | 80 | /* Function call to get the restore pointer for resume from OFF */ |
83 | ENTRY(get_restore_pointer) | 81 | ENTRY(get_restore_pointer) |
84 | stmfd sp!, {lr} @ save registers on stack | 82 | stmfd sp!, {lr} @ save registers on stack |
85 | adr r0, restore | 83 | adr r0, restore |
86 | ldmfd sp!, {pc} @ restore regs and return | 84 | ldmfd sp!, {pc} @ restore regs and return |
87 | ENTRY(get_restore_pointer_sz) | 85 | ENTRY(get_restore_pointer_sz) |
88 | .word . - get_restore_pointer | 86 | .word . - get_restore_pointer |
89 | 87 | ||
90 | .text | 88 | .text |
91 | /* Function call to get the restore pointer for 3630 resume from OFF */ | 89 | /* Function call to get the restore pointer for 3630 resume from OFF */ |
92 | ENTRY(get_omap3630_restore_pointer) | 90 | ENTRY(get_omap3630_restore_pointer) |
93 | stmfd sp!, {lr} @ save registers on stack | 91 | stmfd sp!, {lr} @ save registers on stack |
94 | adr r0, restore_3630 | 92 | adr r0, restore_3630 |
95 | ldmfd sp!, {pc} @ restore regs and return | 93 | ldmfd sp!, {pc} @ restore regs and return |
96 | ENTRY(get_omap3630_restore_pointer_sz) | 94 | ENTRY(get_omap3630_restore_pointer_sz) |
97 | .word . - get_omap3630_restore_pointer | 95 | .word . - get_omap3630_restore_pointer |
98 | 96 | ||
99 | .text | 97 | .text |
100 | /* Function call to get the restore pointer for ES3 to resume from OFF */ | 98 | /* Function call to get the restore pointer for ES3 to resume from OFF */ |
@@ -112,16 +110,16 @@ ENTRY(get_es3_restore_pointer_sz)
112 | * place on 3630. Hopefully some version in the future may not need this. | 110 | * place on 3630. Hopefully some version in the future may not need this. |
113 | */ | 111 | */ |
114 | ENTRY(enable_omap3630_toggle_l2_on_restore) | 112 | ENTRY(enable_omap3630_toggle_l2_on_restore) |
115 | stmfd sp!, {lr} @ save registers on stack | 113 | stmfd sp!, {lr} @ save registers on stack |
116 | /* Setup so that we will disable and enable l2 */ | 114 | /* Setup so that we will disable and enable l2 */ |
117 | mov r1, #0x1 | 115 | mov r1, #0x1 |
118 | str r1, l2dis_3630 | 116 | str r1, l2dis_3630 |
119 | ldmfd sp!, {pc} @ restore regs and return | 117 | ldmfd sp!, {pc} @ restore regs and return |
120 | 118 | ||
119 | .text | ||
121 | /* Function to call rom code to save secure ram context */ | 120 | /* Function to call rom code to save secure ram context */ |
122 | ENTRY(save_secure_ram_context) | 121 | ENTRY(save_secure_ram_context) |
123 | stmfd sp!, {r1-r12, lr} @ save registers on stack | 122 | stmfd sp!, {r1-r12, lr} @ save registers on stack |
124 | |||
125 | adr r3, api_params @ r3 points to parameters | 123 | adr r3, api_params @ r3 points to parameters |
126 | str r0, [r3,#0x4] @ r0 has sdram address | 124 | str r0, [r3,#0x4] @ r0 has sdram address |
127 | ldr r12, high_mask | 125 | ldr r12, high_mask |
@@ -165,14 +163,14 @@ ENTRY(save_secure_ram_context_sz)
165 | * | 163 | * |
166 | * | 164 | * |
167 | * Notes: | 165 | * Notes: |
168 | * - this code gets copied to internal SRAM at boot. The execution pointer | 166 | * - this code gets copied to internal SRAM at boot and after wake-up |
169 | * in SRAM is _omap_sram_idle. | 167 | * from OFF mode. The execution pointer in SRAM is _omap_sram_idle. |
170 | * - when the OMAP wakes up it continues at different execution points | 168 | * - when the OMAP wakes up it continues at different execution points |
171 | * depending on the low power mode (non-OFF vs OFF modes), | 169 | * depending on the low power mode (non-OFF vs OFF modes), |
172 | * cf. 'Resume path for xxx mode' comments. | 170 | * cf. 'Resume path for xxx mode' comments. |
173 | */ | 171 | */ |
174 | ENTRY(omap34xx_cpu_suspend) | 172 | ENTRY(omap34xx_cpu_suspend) |
175 | stmfd sp!, {r0-r12, lr} @ save registers on stack | 173 | stmfd sp!, {r0-r12, lr} @ save registers on stack |
176 | 174 | ||
177 | /* | 175 | /* |
178 | * r0 contains restore pointer in sdram | 176 | * r0 contains restore pointer in sdram |
@@ -280,9 +278,9 @@ clean_l2:
280 | * - should be faster and will change with kernel | 278 | * - should be faster and will change with kernel |
281 | * - 'might' have to copy address, load and jump to it | 279 | * - 'might' have to copy address, load and jump to it |
282 | */ | 280 | */ |
283 | ldr r1, kernel_flush | 281 | ldr r1, kernel_flush |
284 | mov lr, pc | 282 | mov lr, pc |
285 | bx r1 | 283 | bx r1 |
286 | 284 | ||
287 | omap3_do_wfi: | 285 | omap3_do_wfi: |
288 | ldr r4, sdrc_power @ read the SDRC_POWER register | 286 | ldr r4, sdrc_power @ read the SDRC_POWER register |
@@ -375,18 +373,18 @@ restore_3630:
375 | /* Fall through to common code for the remaining logic */ | 373 | /* Fall through to common code for the remaining logic */ |
376 | 374 | ||
377 | restore: | 375 | restore: |
378 | /* | 376 | /* |
379 | * Check what was the reason for mpu reset and store the reason in r9: | 377 | * Check what was the reason for mpu reset and store the reason in r9: |
380 | * 0 - No context lost | 378 | * 0 - No context lost |
381 | * 1 - Only L1 and logic lost | 379 | * 1 - Only L1 and logic lost |
382 | * 2 - Only L2 lost - In this case, we wont be here | 380 | * 2 - Only L2 lost - In this case, we wont be here |
383 | * 3 - Both L1 and L2 lost | 381 | * 3 - Both L1 and L2 lost |
384 | */ | 382 | */ |
385 | ldr r1, pm_pwstctrl_mpu | 383 | ldr r1, pm_pwstctrl_mpu |
386 | ldr r2, [r1] | 384 | ldr r2, [r1] |
387 | and r2, r2, #0x3 | 385 | and r2, r2, #0x3 |
388 | cmp r2, #0x0 @ Check if target power state was OFF or RET | 386 | cmp r2, #0x0 @ Check if target power state was OFF or RET |
389 | moveq r9, #0x3 @ MPU OFF => L1 and L2 lost | 387 | moveq r9, #0x3 @ MPU OFF => L1 and L2 lost |
390 | movne r9, #0x1 @ Only L1 and L2 lost => avoid L2 invalidation | 388 | movne r9, #0x1 @ Only L1 and L2 lost => avoid L2 invalidation |
391 | bne logic_l1_restore | 389 | bne logic_l1_restore |
392 | 390 | ||
@@ -402,71 +400,74 @@ skipl2dis:
402 | and r1, #0x700 | 400 | and r1, #0x700 |
403 | cmp r1, #0x300 | 401 | cmp r1, #0x300 |
404 | beq l2_inv_gp | 402 | beq l2_inv_gp |
405 | mov r0, #40 @ set service ID for PPA | 403 | mov r0, #40 @ set service ID for PPA |
406 | mov r12, r0 @ copy secure Service ID in r12 | 404 | mov r12, r0 @ copy secure Service ID in r12 |
407 | mov r1, #0 @ set task id for ROM code in r1 | 405 | mov r1, #0 @ set task id for ROM code in r1 |
408 | mov r2, #4 @ set some flags in r2, r6 | 406 | mov r2, #4 @ set some flags in r2, r6 |
409 | mov r6, #0xff | 407 | mov r6, #0xff |
410 | adr r3, l2_inv_api_params @ r3 points to dummy parameters | 408 | adr r3, l2_inv_api_params @ r3 points to dummy parameters |
411 | mcr p15, 0, r0, c7, c10, 4 @ data write barrier | 409 | mcr p15, 0, r0, c7, c10, 4 @ data write barrier |
412 | mcr p15, 0, r0, c7, c10, 5 @ data memory barrier | 410 | mcr p15, 0, r0, c7, c10, 5 @ data memory barrier |
413 | .word 0xE1600071 @ call SMI monitor (smi #1) | 411 | .word 0xE1600071 @ call SMI monitor (smi #1) |
414 | /* Write to Aux control register to set some bits */ | 412 | /* Write to Aux control register to set some bits */ |
415 | mov r0, #42 @ set service ID for PPA | 413 | mov r0, #42 @ set service ID for PPA |
416 | mov r12, r0 @ copy secure Service ID in r12 | 414 | mov r12, r0 @ copy secure Service ID in r12 |
417 | mov r1, #0 @ set task id for ROM code in r1 | 415 | mov r1, #0 @ set task id for ROM code in r1 |
418 | mov r2, #4 @ set some flags in r2, r6 | 416 | mov r2, #4 @ set some flags in r2, r6 |
419 | mov r6, #0xff | 417 | mov r6, #0xff |
420 | ldr r4, scratchpad_base | 418 | ldr r4, scratchpad_base |
421 | ldr r3, [r4, #0xBC] @ r3 points to parameters | 419 | ldr r3, [r4, #0xBC] @ r3 points to parameters |
422 | mcr p15, 0, r0, c7, c10, 4 @ data write barrier | 420 | mcr p15, 0, r0, c7, c10, 4 @ data write barrier |
423 | mcr p15, 0, r0, c7, c10, 5 @ data memory barrier | 421 | mcr p15, 0, r0, c7, c10, 5 @ data memory barrier |
424 | .word 0xE1600071 @ call SMI monitor (smi #1) | 422 | .word 0xE1600071 @ call SMI monitor (smi #1) |
425 | 423 | ||
426 | #ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE | 424 | #ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE |
427 | /* Restore L2 aux control register */ | 425 | /* Restore L2 aux control register */ |
428 | @ set service ID for PPA | 426 | @ set service ID for PPA |
429 | mov r0, #CONFIG_OMAP3_L2_AUX_SECURE_SERVICE_SET_ID | 427 | mov r0, #CONFIG_OMAP3_L2_AUX_SECURE_SERVICE_SET_ID |
430 | mov r12, r0 @ copy service ID in r12 | 428 | mov r12, r0 @ copy service ID in r12 |
431 | mov r1, #0 @ set task ID for ROM code in r1 | 429 | mov r1, #0 @ set task ID for ROM code in r1 |
432 | mov r2, #4 @ set some flags in r2, r6 | 430 | mov r2, #4 @ set some flags in r2, r6 |
433 | mov r6, #0xff | 431 | mov r6, #0xff |
434 | ldr r4, scratchpad_base | 432 | ldr r4, scratchpad_base |
435 | ldr r3, [r4, #0xBC] | 433 | ldr r3, [r4, #0xBC] |
436 | adds r3, r3, #8 @ r3 points to parameters | 434 | adds r3, r3, #8 @ r3 points to parameters |
437 | mcr p15, 0, r0, c7, c10, 4 @ data write barrier | 435 | mcr p15, 0, r0, c7, c10, 4 @ data write barrier |
438 | mcr p15, 0, r0, c7, c10, 5 @ data memory barrier | 436 | mcr p15, 0, r0, c7, c10, 5 @ data memory barrier |
439 | .word 0xE1600071 @ call SMI monitor (smi #1) | 437 | .word 0xE1600071 @ call SMI monitor (smi #1) |
440 | #endif | 438 | #endif |
441 | b logic_l1_restore | 439 | b logic_l1_restore |
440 | |||
442 | l2_inv_api_params: | 441 | l2_inv_api_params: |
443 | .word 0x1, 0x00 | 442 | .word 0x1, 0x00 |
444 | l2_inv_gp: | 443 | l2_inv_gp: |
445 | /* Execute smi to invalidate L2 cache */ | 444 | /* Execute smi to invalidate L2 cache */ |
446 | mov r12, #0x1 @ set up to invalide L2 | 445 | mov r12, #0x1 @ set up to invalidate L2 |
447 | smi: .word 0xE1600070 @ Call SMI monitor (smieq) | 446 | .word 0xE1600070 @ Call SMI monitor (smieq) |
448 | /* Write to Aux control register to set some bits */ | 447 | /* Write to Aux control register to set some bits */ |
449 | ldr r4, scratchpad_base | 448 | ldr r4, scratchpad_base |
450 | ldr r3, [r4,#0xBC] | 449 | ldr r3, [r4,#0xBC] |
451 | ldr r0, [r3,#4] | 450 | ldr r0, [r3,#4] |
452 | mov r12, #0x3 | 451 | mov r12, #0x3 |
453 | .word 0xE1600070 @ Call SMI monitor (smieq) | 452 | .word 0xE1600070 @ Call SMI monitor (smieq) |
454 | ldr r4, scratchpad_base | 453 | ldr r4, scratchpad_base |
455 | ldr r3, [r4,#0xBC] | 454 | ldr r3, [r4,#0xBC] |
456 | ldr r0, [r3,#12] | 455 | ldr r0, [r3,#12] |
457 | mov r12, #0x2 | 456 | mov r12, #0x2 |
458 | .word 0xE1600070 @ Call SMI monitor (smieq) | 457 | .word 0xE1600070 @ Call SMI monitor (smieq) |
459 | logic_l1_restore: | 458 | logic_l1_restore: |
460 | ldr r1, l2dis_3630 | 459 | ldr r1, l2dis_3630 |
461 | cmp r1, #0x1 @ Do we need to re-enable L2 on 3630? | 460 | cmp r1, #0x1 @ Test if L2 re-enable needed on 3630 |
462 | bne skipl2reen | 461 | bne skipl2reen |
463 | mrc p15, 0, r1, c1, c0, 1 | 462 | mrc p15, 0, r1, c1, c0, 1 |
464 | orr r1, r1, #2 @ re-enable L2 cache | 463 | orr r1, r1, #2 @ re-enable L2 cache |
465 | mcr p15, 0, r1, c1, c0, 1 | 464 | mcr p15, 0, r1, c1, c0, 1 |
466 | skipl2reen: | 465 | skipl2reen: |
467 | mov r1, #0 | 466 | mov r1, #0 |
468 | /* Invalidate all instruction caches to PoU | 467 | /* |
469 | * and flush branch target cache */ | 468 | * Invalidate all instruction caches to PoU |
469 | * and flush branch target cache | ||
470 | */ | ||
470 | mcr p15, 0, r1, c7, c5, 0 | 471 | mcr p15, 0, r1, c7, c5, 0 |
471 | 472 | ||
472 | ldr r4, scratchpad_base | 473 | ldr r4, scratchpad_base |
@@ -487,33 +488,33 @@ skipl2reen:
487 | MCR p15, 0, r6, c2, c0, 1 | 488 | MCR p15, 0, r6, c2, c0, 1 |
488 | /* Translation table base control register */ | 489 | /* Translation table base control register */ |
489 | MCR p15, 0, r7, c2, c0, 2 | 490 | MCR p15, 0, r7, c2, c0, 2 |
490 | /*domain access Control Register */ | 491 | /* Domain access Control Register */ |
491 | MCR p15, 0, r8, c3, c0, 0 | 492 | MCR p15, 0, r8, c3, c0, 0 |
492 | /* data fault status Register */ | 493 | /* Data fault status Register */ |
493 | MCR p15, 0, r9, c5, c0, 0 | 494 | MCR p15, 0, r9, c5, c0, 0 |
494 | 495 | ||
495 | ldmia r3!,{r4-r8} | 496 | ldmia r3!,{r4-r8} |
496 | /* instruction fault status Register */ | 497 | /* Instruction fault status Register */ |
497 | MCR p15, 0, r4, c5, c0, 1 | 498 | MCR p15, 0, r4, c5, c0, 1 |
498 | /*Data Auxiliary Fault Status Register */ | 499 | /* Data Auxiliary Fault Status Register */ |
499 | MCR p15, 0, r5, c5, c1, 0 | 500 | MCR p15, 0, r5, c5, c1, 0 |
500 | /*Instruction Auxiliary Fault Status Register*/ | 501 | /* Instruction Auxiliary Fault Status Register*/ |
501 | MCR p15, 0, r6, c5, c1, 1 | 502 | MCR p15, 0, r6, c5, c1, 1 |
502 | /*Data Fault Address Register */ | 503 | /* Data Fault Address Register */ |
503 | MCR p15, 0, r7, c6, c0, 0 | 504 | MCR p15, 0, r7, c6, c0, 0 |
504 | /*Instruction Fault Address Register*/ | 505 | /* Instruction Fault Address Register*/ |
505 | MCR p15, 0, r8, c6, c0, 2 | 506 | MCR p15, 0, r8, c6, c0, 2 |
506 | ldmia r3!,{r4-r7} | 507 | ldmia r3!,{r4-r7} |
507 | 508 | ||
508 | /* user r/w thread and process ID */ | 509 | /* User r/w thread and process ID */ |
509 | MCR p15, 0, r4, c13, c0, 2 | 510 | MCR p15, 0, r4, c13, c0, 2 |
510 | /* user ro thread and process ID */ | 511 | /* User ro thread and process ID */ |
511 | MCR p15, 0, r5, c13, c0, 3 | 512 | MCR p15, 0, r5, c13, c0, 3 |
512 | /*Privileged only thread and process ID */ | 513 | /* Privileged only thread and process ID */ |
513 | MCR p15, 0, r6, c13, c0, 4 | 514 | MCR p15, 0, r6, c13, c0, 4 |
514 | /* cache size selection */ | 515 | /* Cache size selection */ |
515 | MCR p15, 2, r7, c0, c0, 0 | 516 | MCR p15, 2, r7, c0, c0, 0 |
516 | ldmia r3!,{r4-r8} | 517 | ldmia r3!,{r4-r8} |
517 | /* Data TLB lockdown registers */ | 518 | /* Data TLB lockdown registers */ |
518 | MCR p15, 0, r4, c10, c0, 0 | 519 | MCR p15, 0, r4, c10, c0, 0 |
519 | /* Instruction TLB lockdown registers */ | 520 | /* Instruction TLB lockdown registers */ |
@@ -525,26 +526,27 @@ skipl2reen:
525 | /* Context PID */ | 526 | /* Context PID */ |
526 | MCR p15, 0, r8, c13, c0, 1 | 527 | MCR p15, 0, r8, c13, c0, 1 |
527 | 528 | ||
528 | ldmia r3!,{r4-r5} | 529 | ldmia r3!,{r4-r5} |
529 | /* primary memory remap register */ | 530 | /* Primary memory remap register */ |
530 | MCR p15, 0, r4, c10, c2, 0 | 531 | MCR p15, 0, r4, c10, c2, 0 |
531 | /*normal memory remap register */ | 532 | /* Normal memory remap register */ |
532 | MCR p15, 0, r5, c10, c2, 1 | 533 | MCR p15, 0, r5, c10, c2, 1 |
533 | 534 | ||
534 | /* Restore cpsr */ | 535 | /* Restore cpsr */ |
535 | ldmia r3!,{r4} /*load CPSR from SDRAM*/ | 536 | ldmia r3!,{r4} @ load CPSR from SDRAM |
536 | msr cpsr, r4 /*store cpsr */ | 537 | msr cpsr, r4 @ store cpsr |
537 | 538 | ||
538 | /* Enabling MMU here */ | 539 | /* Enabling MMU here */ |
539 | mrc p15, 0, r7, c2, c0, 2 /* Read TTBRControl */ | 540 | mrc p15, 0, r7, c2, c0, 2 @ Read TTBRControl |
540 | /* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1*/ | 541 | /* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1 */ |
541 | and r7, #0x7 | 542 | and r7, #0x7 |
542 | cmp r7, #0x0 | 543 | cmp r7, #0x0 |
543 | beq usettbr0 | 544 | beq usettbr0 |
544 | ttbr_error: | 545 | ttbr_error: |
545 | /* More work needs to be done to support N[0:2] value other than 0 | 546 | /* |
546 | * So looping here so that the error can be detected | 547 | * More work needs to be done to support N[0:2] value other than 0 |
547 | */ | 548 | * So looping here so that the error can be detected |
549 | */ | ||
548 | b ttbr_error | 550 | b ttbr_error |
549 | usettbr0: | 551 | usettbr0: |
550 | mrc p15, 0, r2, c2, c0, 0 | 552 | mrc p15, 0, r2, c2, c0, 0 |
@@ -552,21 +554,25 @@ usettbr0:
552 | and r2, r5 | 554 | and r2, r5 |
553 | mov r4, pc | 555 | mov r4, pc |
554 | ldr r5, table_index_mask | 556 | ldr r5, table_index_mask |
555 | and r4, r5 /* r4 = 31 to 20 bits of pc */ | 557 | and r4, r5 @ r4 = 31 to 20 bits of pc |
556 | /* Extract the value to be written to table entry */ | 558 | /* Extract the value to be written to table entry */ |
557 | ldr r1, table_entry | 559 | ldr r1, table_entry |
558 | add r1, r1, r4 /* r1 has value to be written to table entry*/ | 560 | /* r1 has the value to be written to table entry*/ |
561 | add r1, r1, r4 | ||
559 | /* Getting the address of table entry to modify */ | 562 | /* Getting the address of table entry to modify */ |
560 | lsr r4, #18 | 563 | lsr r4, #18 |
561 | add r2, r4 /* r2 has the location which needs to be modified */ | 564 | /* r2 has the location which needs to be modified */ |
565 | add r2, r4 | ||
562 | /* Storing previous entry of location being modified */ | 566 | /* Storing previous entry of location being modified */ |
563 | ldr r5, scratchpad_base | 567 | ldr r5, scratchpad_base |
564 | ldr r4, [r2] | 568 | ldr r4, [r2] |
565 | str r4, [r5, #0xC0] | 569 | str r4, [r5, #0xC0] |
566 | /* Modify the table entry */ | 570 | /* Modify the table entry */ |
567 | str r1, [r2] | 571 | str r1, [r2] |
568 | /* Storing address of entry being modified | 572 | /* |
569 | * - will be restored after enabling MMU */ | 573 | * Storing address of entry being modified |
574 | * - will be restored after enabling MMU | ||
575 | */ | ||
570 | ldr r5, scratchpad_base | 576 | ldr r5, scratchpad_base |
571 | str r2, [r5, #0xC4] | 577 | str r2, [r5, #0xC4] |
572 | 578 | ||
@@ -575,8 +581,11 @@ usettbr0:
575 | mcr p15, 0, r0, c7, c5, 6 @ Invalidate branch predictor array | 581 | mcr p15, 0, r0, c7, c5, 6 @ Invalidate branch predictor array |
576 | mcr p15, 0, r0, c8, c5, 0 @ Invalidate instruction TLB | 582 | mcr p15, 0, r0, c8, c5, 0 @ Invalidate instruction TLB |
577 | mcr p15, 0, r0, c8, c6, 0 @ Invalidate data TLB | 583 | mcr p15, 0, r0, c8, c6, 0 @ Invalidate data TLB |
578 | /* Restore control register but dont enable caches here*/ | 584 | /* |
579 | /* Caches will be enabled after restoring MMU table entry */ | 585 | * Restore control register. This enables the MMU. |
586 | * The caches and prediction are not enabled here, they | ||
587 | * will be enabled after restoring the MMU table entry. | ||
588 | */ | ||
580 | ldmia r3!, {r4} | 589 | ldmia r3!, {r4} |
581 | /* Store previous value of control register in scratchpad */ | 590 | /* Store previous value of control register in scratchpad */ |
582 | str r4, [r5, #0xC8] | 591 | str r4, [r5, #0xC8] |
@@ -589,7 +598,7 @@ usettbr0:
589 | * == Exit point from OFF mode == | 598 | * == Exit point from OFF mode == |
590 | * ============================== | 599 | * ============================== |
591 | */ | 600 | */ |
592 | ldmfd sp!, {r0-r12, pc} @ restore regs and return | 601 | ldmfd sp!, {r0-r12, pc} @ restore regs and return |
593 | 602 | ||
594 | 603 | ||
595 | /* | 604 | /* |
@@ -655,55 +664,56 @@ ENTRY(es3_sdrc_fix_sz)
655 | /* Make sure SDRC accesses are ok */ | 664 | /* Make sure SDRC accesses are ok */ |
656 | wait_sdrc_ok: | 665 | wait_sdrc_ok: |
657 | 666 | ||
658 | /* DPLL3 must be locked before accessing the SDRC. Maybe the HW ensures this. */ | 667 | /* DPLL3 must be locked before accessing the SDRC. Maybe the HW ensures this */ |
659 | ldr r4, cm_idlest_ckgen | 668 | ldr r4, cm_idlest_ckgen |
660 | wait_dpll3_lock: | 669 | wait_dpll3_lock: |
661 | ldr r5, [r4] | 670 | ldr r5, [r4] |
662 | tst r5, #1 | 671 | tst r5, #1 |
663 | beq wait_dpll3_lock | 672 | beq wait_dpll3_lock |
664 | 673 | ||
665 | ldr r4, cm_idlest1_core | 674 | ldr r4, cm_idlest1_core |
666 | wait_sdrc_ready: | 675 | wait_sdrc_ready: |
667 | ldr r5, [r4] | 676 | ldr r5, [r4] |
668 | tst r5, #0x2 | 677 | tst r5, #0x2 |
669 | bne wait_sdrc_ready | 678 | bne wait_sdrc_ready |
670 | /* allow DLL powerdown upon hw idle req */ | 679 | /* allow DLL powerdown upon hw idle req */ |
671 | ldr r4, sdrc_power | 680 | ldr r4, sdrc_power |
672 | ldr r5, [r4] | 681 | ldr r5, [r4] |
673 | bic r5, r5, #0x40 | 682 | bic r5, r5, #0x40 |
674 | str r5, [r4] | 683 | str r5, [r4] |
675 | is_dll_in_lock_mode: | ||
676 | 684 | ||
677 | /* Is dll in lock mode? */ | 685 | is_dll_in_lock_mode: |
678 | ldr r4, sdrc_dlla_ctrl | 686 | /* Is dll in lock mode? */ |
679 | ldr r5, [r4] | 687 | ldr r4, sdrc_dlla_ctrl |
680 | tst r5, #0x4 | 688 | ldr r5, [r4] |
681 | bxne lr | 689 | tst r5, #0x4 |
682 | /* wait till dll locks */ | 690 | bxne lr @ Return if locked |
691 | /* wait till dll locks */ | ||
683 | wait_dll_lock_timed: | 692 | wait_dll_lock_timed: |
684 | ldr r4, wait_dll_lock_counter | 693 | ldr r4, wait_dll_lock_counter |
685 | add r4, r4, #1 | 694 | add r4, r4, #1 |
686 | str r4, wait_dll_lock_counter | 695 | str r4, wait_dll_lock_counter |
687 | ldr r4, sdrc_dlla_status | 696 | ldr r4, sdrc_dlla_status |
688 | mov r6, #8 /* Wait 20uS for lock */ | 697 | /* Wait 20uS for lock */ |
698 | mov r6, #8 | ||
689 | wait_dll_lock: | 699 | wait_dll_lock: |
690 | subs r6, r6, #0x1 | 700 | subs r6, r6, #0x1 |
691 | beq kick_dll | 701 | beq kick_dll |
692 | ldr r5, [r4] | 702 | ldr r5, [r4] |
693 | and r5, r5, #0x4 | 703 | and r5, r5, #0x4 |
694 | cmp r5, #0x4 | 704 | cmp r5, #0x4 |
695 | bne wait_dll_lock | 705 | bne wait_dll_lock |
696 | bx lr | 706 | bx lr @ Return when locked |
697 | 707 | ||
698 | /* disable/reenable DLL if not locked */ | 708 | /* disable/reenable DLL if not locked */ |
699 | kick_dll: | 709 | kick_dll: |
700 | ldr r4, sdrc_dlla_ctrl | 710 | ldr r4, sdrc_dlla_ctrl |
701 | ldr r5, [r4] | 711 | ldr r5, [r4] |
702 | mov r6, r5 | 712 | mov r6, r5 |
703 | bic r6, #(1<<3) /* disable dll */ | 713 | bic r6, #(1<<3) @ disable dll |
704 | str r6, [r4] | 714 | str r6, [r4] |
705 | dsb | 715 | dsb |
706 | orr r6, r6, #(1<<3) /* enable dll */ | 716 | orr r6, r6, #(1<<3) @ enable dll |
707 | str r6, [r4] | 717 | str r6, [r4] |
708 | dsb | 718 | dsb |
709 | ldr r4, kick_counter | 719 | ldr r4, kick_counter |
@@ -728,7 +738,7 @@ scratchpad_base:
728 | sram_base: | 738 | sram_base: |
729 | .word SRAM_BASE_P + 0x8000 | 739 | .word SRAM_BASE_P + 0x8000 |
730 | sdrc_power: | 740 | sdrc_power: |
731 | .word SDRC_POWER_V | 741 | .word SDRC_POWER_V |
732 | ttbrbit_mask: | 742 | ttbrbit_mask: |
733 | .word 0xFFFFC000 | 743 | .word 0xFFFFC000 |
734 | table_index_mask: | 744 | table_index_mask: |
@@ -742,9 +752,9 @@ control_stat:
742 | control_mem_rta: | 752 | control_mem_rta: |
743 | .word CONTROL_MEM_RTA_CTRL | 753 | .word CONTROL_MEM_RTA_CTRL |
744 | kernel_flush: | 754 | kernel_flush: |
745 | .word v7_flush_dcache_all | 755 | .word v7_flush_dcache_all |
746 | l2dis_3630: | 756 | l2dis_3630: |
747 | .word 0 | 757 | .word 0 |
748 | /* | 758 | /* |
749 | * When exporting to userspace while the counters are in SRAM, | 759 | * When exporting to userspace while the counters are in SRAM, |
750 | * these 2 words need to be at the end to facilitate retrival! | 760 | * these 2 words need to be at the end to facilitate retrival! |