Diffstat (limited to 'arch/arm/mach-omap2/sleep34xx.S')
-rw-r--r--  arch/arm/mach-omap2/sleep34xx.S | 518
1 file changed, 189 insertions(+), 329 deletions(-)
diff --git a/arch/arm/mach-omap2/sleep34xx.S b/arch/arm/mach-omap2/sleep34xx.S
index 63f10669571a..f2ea1bd1c691 100644
--- a/arch/arm/mach-omap2/sleep34xx.S
+++ b/arch/arm/mach-omap2/sleep34xx.S
@@ -74,46 +74,6 @@
  * API functions
  */
 
-/*
- * The "get_*restore_pointer" functions are used to provide a
- * physical restore address where the ROM code jumps while waking
- * up from MPU OFF/OSWR state.
- * The restore pointer is stored into the scratchpad.
- */
-
-	.text
-/* Function call to get the restore pointer for resume from OFF */
-ENTRY(get_restore_pointer)
-	stmfd	sp!, {lr}	@ save registers on stack
-	adr	r0, restore
-	ldmfd	sp!, {pc}	@ restore regs and return
-ENDPROC(get_restore_pointer)
-	.align
-ENTRY(get_restore_pointer_sz)
-	.word	. - get_restore_pointer
-
-	.text
-/* Function call to get the restore pointer for 3630 resume from OFF */
-ENTRY(get_omap3630_restore_pointer)
-	stmfd	sp!, {lr}	@ save registers on stack
-	adr	r0, restore_3630
-	ldmfd	sp!, {pc}	@ restore regs and return
-ENDPROC(get_omap3630_restore_pointer)
-	.align
-ENTRY(get_omap3630_restore_pointer_sz)
-	.word	. - get_omap3630_restore_pointer
-
-	.text
-/* Function call to get the restore pointer for ES3 to resume from OFF */
-ENTRY(get_es3_restore_pointer)
-	stmfd	sp!, {lr}	@ save registers on stack
-	adr	r0, restore_es3
-	ldmfd	sp!, {pc}	@ restore regs and return
-ENDPROC(get_es3_restore_pointer)
-	.align
-ENTRY(get_es3_restore_pointer_sz)
-	.word	. - get_es3_restore_pointer
-
 	.text
 /*
  * L2 cache needs to be toggled for stable OFF mode functionality on 3630.
@@ -133,7 +93,7 @@ ENDPROC(enable_omap3630_toggle_l2_on_restore)
 /* Function to call rom code to save secure ram context */
 	.align	3
 ENTRY(save_secure_ram_context)
-	stmfd	sp!, {r1-r12, lr}	@ save registers on stack
+	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack
 	adr	r3, api_params		@ r3 points to parameters
 	str	r0, [r3,#0x4]		@ r0 has sdram address
 	ldr	r12, high_mask
@@ -152,7 +112,7 @@ ENTRY(save_secure_ram_context)
 	nop
 	nop
 	nop
-	ldmfd	sp!, {r1-r12, pc}
+	ldmfd	sp!, {r4 - r11, pc}
 	.align
 sram_phy_addr_mask:
 	.word	SRAM_BASE_P
@@ -179,69 +139,38 @@ ENTRY(save_secure_ram_context_sz)
  *
  *
  * Notes:
- * - this code gets copied to internal SRAM at boot and after wake-up
- *   from OFF mode. The execution pointer in SRAM is _omap_sram_idle.
+ * - only the minimum set of functions gets copied to internal SRAM at boot
+ *   and after wake-up from OFF mode, cf. omap_push_sram_idle. The function
+ *   pointers in SDRAM or SRAM are called depending on the desired low power
+ *   target state.
  * - when the OMAP wakes up it continues at different execution points
  *   depending on the low power mode (non-OFF vs OFF modes),
  *   cf. 'Resume path for xxx mode' comments.
  */
 	.align	3
 ENTRY(omap34xx_cpu_suspend)
-	stmfd	sp!, {r0-r12, lr}	@ save registers on stack
+	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack
 
 	/*
-	 * r0 contains CPU context save/restore pointer in sdram
-	 * r1 contains information about saving context:
+	 * r0 contains information about saving context:
 	 *   0 - No context lost
 	 *   1 - Only L1 and logic lost
 	 *   2 - Only L2 lost (Even L1 is retained we clean it along with L2)
 	 *   3 - Both L1 and L2 lost and logic lost
 	 */
 
-	/* Directly jump to WFI is the context save is not required */
-	cmp	r1, #0x0
-	beq	omap3_do_wfi
+	/*
+	 * For OFF mode: save context and jump to WFI in SDRAM (omap3_do_wfi)
+	 * For non-OFF modes: jump to the WFI code in SRAM (omap3_do_wfi_sram)
+	 */
+	ldr	r4, omap3_do_wfi_sram_addr
+	ldr	r5, [r4]
+	cmp	r0, #0x0		@ If no context save required,
+	bxeq	r5			@ jump to the WFI code in SRAM
+
 
 	/* Otherwise fall through to the save context code */
 save_context_wfi:
-	mov	r8, r0			@ Store SDRAM address in r8
-	mrc	p15, 0, r5, c1, c0, 1	@ Read Auxiliary Control Register
-	mov	r4, #0x1		@ Number of parameters for restore call
-	stmia	r8!, {r4-r5}		@ Push parameters for restore call
-	mrc	p15, 1, r5, c9, c0, 2	@ Read L2 AUX ctrl register
-	stmia	r8!, {r4-r5}		@ Push parameters for restore call
-
-	/* Check what that target sleep state is from r1 */
-	cmp	r1, #0x2		@ Only L2 lost, no need to save context
-	beq	clean_caches
-
-l1_logic_lost:
-	mov	r4, sp			@ Store sp
-	mrs	r5, spsr		@ Store spsr
-	mov	r6, lr			@ Store lr
-	stmia	r8!, {r4-r6}
-
-	mrc	p15, 0, r4, c1, c0, 2	@ Coprocessor access control register
-	mrc	p15, 0, r5, c2, c0, 0	@ TTBR0
-	mrc	p15, 0, r6, c2, c0, 1	@ TTBR1
-	mrc	p15, 0, r7, c2, c0, 2	@ TTBCR
-	stmia	r8!, {r4-r7}
-
-	mrc	p15, 0, r4, c3, c0, 0	@ Domain access Control Register
-	mrc	p15, 0, r5, c10, c2, 0	@ PRRR
-	mrc	p15, 0, r6, c10, c2, 1	@ NMRR
-	stmia	r8!,{r4-r6}
-
-	mrc	p15, 0, r4, c13, c0, 1	@ Context ID
-	mrc	p15, 0, r5, c13, c0, 2	@ User r/w thread and process ID
-	mrc	p15, 0, r6, c12, c0, 0	@ Secure or NS vector base address
-	mrs	r7, cpsr		@ Store current cpsr
-	stmia	r8!, {r4-r7}
-
-	mrc	p15, 0, r4, c1, c0, 0	@ save control register
-	stmia	r8!, {r4}
-
-clean_caches:
 	/*
 	 * jump out to kernel flush routine
 	 *  - reuse that code is better
@@ -284,7 +213,32 @@ clean_caches:
 THUMB(	nop	)
 	.arm
 
-omap3_do_wfi:
+	b	omap3_do_wfi
+
+/*
+ * Local variables
+ */
+omap3_do_wfi_sram_addr:
+	.word omap3_do_wfi_sram
+kernel_flush:
+	.word v7_flush_dcache_all
+
+/* ===================================
+ * == WFI instruction => Enter idle ==
+ * ===================================
+ */
+
+/*
+ * Do WFI instruction
+ * Includes the resume path for non-OFF modes
+ *
+ * This code gets copied to internal SRAM and is accessible
+ * from both SDRAM and SRAM:
+ * - executed from SRAM for non-off modes (omap3_do_wfi_sram),
+ * - executed from SDRAM for OFF mode (omap3_do_wfi).
+ */
+	.align	3
+ENTRY(omap3_do_wfi)
 	ldr	r4, sdrc_power		@ read the SDRC_POWER register
 	ldr	r5, [r4]		@ read the contents of SDRC_POWER
 	orr	r5, r5, #0x40		@ enable self refresh on idle req
@@ -316,8 +270,86 @@ omap3_do_wfi:
 	nop
 	nop
 	nop
-	bl	wait_sdrc_ok
 
+/*
+ * This function implements the erratum ID i581 WA:
+ *  SDRC state restore before accessing the SDRAM
+ *
+ * Only used at return from non-OFF mode. For OFF
+ * mode the ROM code configures the SDRC and
+ * the DPLL before calling the restore code directly
+ * from DDR.
+ */
+
+/* Make sure SDRC accesses are ok */
+wait_sdrc_ok:
+
+/* DPLL3 must be locked before accessing the SDRC. Maybe the HW ensures this */
+	ldr	r4, cm_idlest_ckgen
+wait_dpll3_lock:
+	ldr	r5, [r4]
+	tst	r5, #1
+	beq	wait_dpll3_lock
+
+	ldr	r4, cm_idlest1_core
+wait_sdrc_ready:
+	ldr	r5, [r4]
+	tst	r5, #0x2
+	bne	wait_sdrc_ready
+	/* allow DLL powerdown upon hw idle req */
+	ldr	r4, sdrc_power
+	ldr	r5, [r4]
+	bic	r5, r5, #0x40
+	str	r5, [r4]
+
+/*
+ * PC-relative stores lead to undefined behaviour in Thumb-2: use a r7 as a
+ * base instead.
+ * Be careful not to clobber r7 when maintaing this code.
+ */
+
+is_dll_in_lock_mode:
+	/* Is dll in lock mode? */
+	ldr	r4, sdrc_dlla_ctrl
+	ldr	r5, [r4]
+	tst	r5, #0x4
+	bne	exit_nonoff_modes	@ Return if locked
+	/* wait till dll locks */
+	adr	r7, kick_counter
+wait_dll_lock_timed:
+	ldr	r4, wait_dll_lock_counter
+	add	r4, r4, #1
+	str	r4, [r7, #wait_dll_lock_counter - kick_counter]
+	ldr	r4, sdrc_dlla_status
+	/* Wait 20uS for lock */
+	mov	r6, #8
+wait_dll_lock:
+	subs	r6, r6, #0x1
+	beq	kick_dll
+	ldr	r5, [r4]
+	and	r5, r5, #0x4
+	cmp	r5, #0x4
+	bne	wait_dll_lock
+	b	exit_nonoff_modes	@ Return when locked
+
+	/* disable/reenable DLL if not locked */
+kick_dll:
+	ldr	r4, sdrc_dlla_ctrl
+	ldr	r5, [r4]
+	mov	r6, r5
+	bic	r6, #(1<<3)		@ disable dll
+	str	r6, [r4]
+	dsb
+	orr	r6, r6, #(1<<3)		@ enable dll
+	str	r6, [r4]
+	dsb
+	ldr	r4, kick_counter
+	add	r4, r4, #1
+	str	r4, [r7]		@ kick_counter
+	b	wait_dll_lock_timed
+
+exit_nonoff_modes:
+	/* Re-enable C-bit if needed */
 	mrc	p15, 0, r0, c1, c0, 0
 	tst	r0, #(1 << 2)		@ Check C bit enabled?
 	orreq	r0, r0, #(1 << 2)	@ Enable the C bit if cleared
@@ -329,7 +361,32 @@ omap3_do_wfi:
  * == Exit point from non-OFF modes ==
  * ===================================
  */
-	ldmfd	sp!, {r0-r12, pc}	@ restore regs and return
+	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
+
+/*
+ * Local variables
+ */
+sdrc_power:
+	.word	SDRC_POWER_V
+cm_idlest1_core:
+	.word	CM_IDLEST1_CORE_V
+cm_idlest_ckgen:
+	.word	CM_IDLEST_CKGEN_V
+sdrc_dlla_status:
+	.word	SDRC_DLLA_STATUS_V
+sdrc_dlla_ctrl:
+	.word	SDRC_DLLA_CTRL_V
+/*
+ * When exporting to userspace while the counters are in SRAM,
+ * these 2 words need to be at the end to facilitate retrival!
+ */
+kick_counter:
+	.word	0
+wait_dll_lock_counter:
+	.word	0
+
+ENTRY(omap3_do_wfi_sz)
+	.word	. - omap3_do_wfi
 
 
 /*
@@ -346,13 +403,17 @@ omap3_do_wfi:
  * restore_es3: applies to 34xx >= ES3.0
  * restore_3630: applies to 36xx
  * restore: common code for 3xxx
+ *
+ * Note: when back from CORE and MPU OFF mode we are running
+ *  from SDRAM, without MMU, without the caches and prediction.
+ *  Also the SRAM content has been cleared.
  */
-restore_es3:
+ENTRY(omap3_restore_es3)
 	ldr	r5, pm_prepwstst_core_p
 	ldr	r4, [r5]
 	and	r4, r4, #0x3
 	cmp	r4, #0x0	@ Check if previous power state of CORE is OFF
-	bne	restore
+	bne	omap3_restore	@ Fall through to OMAP3 common code
 	adr	r0, es3_sdrc_fix
 	ldr	r1, sram_base
 	ldr	r2, es3_sdrc_fix_sz
@@ -364,35 +425,32 @@ copy_to_sram:
 	bne	copy_to_sram
 	ldr	r1, sram_base
 	blx	r1
-	b	restore
+	b	omap3_restore	@ Fall through to OMAP3 common code
+ENDPROC(omap3_restore_es3)
 
-restore_3630:
+ENTRY(omap3_restore_3630)
 	ldr	r1, pm_prepwstst_core_p
 	ldr	r2, [r1]
 	and	r2, r2, #0x3
 	cmp	r2, #0x0	@ Check if previous power state of CORE is OFF
-	bne	restore
+	bne	omap3_restore	@ Fall through to OMAP3 common code
 	/* Disable RTA before giving control */
 	ldr	r1, control_mem_rta
 	mov	r2, #OMAP36XX_RTA_DISABLE
 	str	r2, [r1]
+ENDPROC(omap3_restore_3630)
 
 	/* Fall through to common code for the remaining logic */
 
-restore:
+ENTRY(omap3_restore)
 	/*
-	 * Check what was the reason for mpu reset and store the reason in r9:
-	 *  0 - No context lost
-	 *  1 - Only L1 and logic lost
-	 *  2 - Only L2 lost - In this case, we wont be here
-	 *  3 - Both L1 and L2 lost
+	 * Read the pwstctrl register to check the reason for mpu reset.
+	 * This tells us what was lost.
 	 */
 	ldr	r1, pm_pwstctrl_mpu
 	ldr	r2, [r1]
 	and	r2, r2, #0x3
 	cmp	r2, #0x0	@ Check if target power state was OFF or RET
-	moveq	r9, #0x3	@ MPU OFF => L1 and L2 lost
-	movne	r9, #0x1	@ Only L1 and L2 lost => avoid L2 invalidation
 	bne	logic_l1_restore
 
 	ldr	r0, l2dis_3630
@@ -471,115 +529,39 @@ logic_l1_restore:
 	orr	r1, r1, #2		@ re-enable L2 cache
 	mcr	p15, 0, r1, c1, c0, 1
 skipl2reen:
-	mov	r1, #0
-	/*
-	 * Invalidate all instruction caches to PoU
-	 * and flush branch target cache
-	 */
-	mcr	p15, 0, r1, c7, c5, 0
 
-	ldr	r4, scratchpad_base
-	ldr	r3, [r4,#0xBC]
-	adds	r3, r3, #16
+	/* Now branch to the common CPU resume function */
+	b	cpu_resume
+ENDPROC(omap3_restore)
 
-	ldmia	r3!, {r4-r6}
-	mov	sp, r4		@ Restore sp
-	msr	spsr_cxsf, r5	@ Restore spsr
-	mov	lr, r6		@ Restore lr
-
-	ldmia	r3!, {r4-r7}
-	mcr	p15, 0, r4, c1, c0, 2	@ Coprocessor access Control Register
-	mcr	p15, 0, r5, c2, c0, 0	@ TTBR0
-	mcr	p15, 0, r6, c2, c0, 1	@ TTBR1
-	mcr	p15, 0, r7, c2, c0, 2	@ TTBCR
-
-	ldmia	r3!,{r4-r6}
-	mcr	p15, 0, r4, c3, c0, 0	@ Domain access Control Register
-	mcr	p15, 0, r5, c10, c2, 0	@ PRRR
-	mcr	p15, 0, r6, c10, c2, 1	@ NMRR
-
-
-	ldmia	r3!,{r4-r7}
-	mcr	p15, 0, r4, c13, c0, 1	@ Context ID
-	mcr	p15, 0, r5, c13, c0, 2	@ User r/w thread and process ID
-	mrc	p15, 0, r6, c12, c0, 0	@ Secure or NS vector base address
-	msr	cpsr, r7		@ store cpsr
-
-	/* Enabling MMU here */
-	mrc	p15, 0, r7, c2, c0, 2	@ Read TTBRControl
-	/* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1 */
-	and	r7, #0x7
-	cmp	r7, #0x0
-	beq	usettbr0
-ttbr_error:
-	/*
-	 * More work needs to be done to support N[0:2] value other than 0
-	 * So looping here so that the error can be detected
-	 */
-	b	ttbr_error
-usettbr0:
-	mrc	p15, 0, r2, c2, c0, 0
-	ldr	r5, ttbrbit_mask
-	and	r2, r5
-	mov	r4, pc
-	ldr	r5, table_index_mask
-	and	r4, r5		@ r4 = 31 to 20 bits of pc
-	/* Extract the value to be written to table entry */
-	ldr	r1, table_entry
-	/* r1 has the value to be written to table entry*/
-	add	r1, r1, r4
-	/* Getting the address of table entry to modify */
-	lsr	r4, #18
-	/* r2 has the location which needs to be modified */
-	add	r2, r4
-	/* Storing previous entry of location being modified */
-	ldr	r5, scratchpad_base
-	ldr	r4, [r2]
-	str	r4, [r5, #0xC0]
-	/* Modify the table entry */
-	str	r1, [r2]
-	/*
-	 * Storing address of entry being modified
-	 * - will be restored after enabling MMU
-	 */
-	ldr	r5, scratchpad_base
-	str	r2, [r5, #0xC4]
-
-	mov	r0, #0
-	mcr	p15, 0, r0, c7, c5, 4	@ Flush prefetch buffer
-	mcr	p15, 0, r0, c7, c5, 6	@ Invalidate branch predictor array
-	mcr	p15, 0, r0, c8, c5, 0	@ Invalidate instruction TLB
-	mcr	p15, 0, r0, c8, c6, 0	@ Invalidate data TLB
-	/*
-	 * Restore control register. This enables the MMU.
-	 * The caches and prediction are not enabled here, they
-	 * will be enabled after restoring the MMU table entry.
-	 */
-	ldmia	r3!, {r4}
-	/* Store previous value of control register in scratchpad */
-	str	r4, [r5, #0xC8]
-	ldr	r2, cache_pred_disable_mask
-	and	r4, r2
-	mcr	p15, 0, r4, c1, c0, 0
-	dsb
-	isb
-	ldr	r0, =restoremmu_on
-	bx	r0
+	.ltorg
 
 /*
- * ==============================
- * == Exit point from OFF mode ==
- * ==============================
+ * Local variables
  */
-restoremmu_on:
-	ldmfd	sp!, {r0-r12, pc}	@ restore regs and return
-
+pm_prepwstst_core_p:
+	.word	PM_PREPWSTST_CORE_P
+pm_pwstctrl_mpu:
+	.word	PM_PWSTCTRL_MPU_P
+scratchpad_base:
+	.word	SCRATCHPAD_BASE_P
+sram_base:
+	.word	SRAM_BASE_P + 0x8000
+control_stat:
+	.word	CONTROL_STAT
+control_mem_rta:
+	.word	CONTROL_MEM_RTA_CTRL
+l2dis_3630:
+	.word	0
 
 /*
  * Internal functions
  */
 
-/* This function implements the erratum ID i443 WA, applies to 34xx >= ES3.0 */
+/*
+ * This function implements the erratum ID i443 WA, applies to 34xx >= ES3.0
+ * Copied to and run from SRAM in order to reconfigure the SDRC parameters.
+ */
 	.text
 	.align	3
 ENTRY(es3_sdrc_fix)
@@ -609,6 +591,9 @@ ENTRY(es3_sdrc_fix)
 	str	r5, [r4]	@ kick off refreshes
 	bx	lr
 
+/*
+ * Local variables
+ */
 	.align
 sdrc_syscfg:
 	.word	SDRC_SYSCONFIG_P
@@ -627,128 +612,3 @@ sdrc_manual_1:
 ENDPROC(es3_sdrc_fix)
 ENTRY(es3_sdrc_fix_sz)
 	.word	. - es3_sdrc_fix
-
-/*
- * This function implements the erratum ID i581 WA:
- *  SDRC state restore before accessing the SDRAM
- *
- * Only used at return from non-OFF mode. For OFF
- * mode the ROM code configures the SDRC and
- * the DPLL before calling the restore code directly
- * from DDR.
- */
-
-/* Make sure SDRC accesses are ok */
-wait_sdrc_ok:
-
-/* DPLL3 must be locked before accessing the SDRC. Maybe the HW ensures this */
-	ldr	r4, cm_idlest_ckgen
-wait_dpll3_lock:
-	ldr	r5, [r4]
-	tst	r5, #1
-	beq	wait_dpll3_lock
-
-	ldr	r4, cm_idlest1_core
-wait_sdrc_ready:
-	ldr	r5, [r4]
-	tst	r5, #0x2
-	bne	wait_sdrc_ready
-	/* allow DLL powerdown upon hw idle req */
-	ldr	r4, sdrc_power
-	ldr	r5, [r4]
-	bic	r5, r5, #0x40
-	str	r5, [r4]
-
-/*
- * PC-relative stores lead to undefined behaviour in Thumb-2: use a r7 as a
- * base instead.
- * Be careful not to clobber r7 when maintaing this code.
- */
-
-is_dll_in_lock_mode:
-	/* Is dll in lock mode? */
-	ldr	r4, sdrc_dlla_ctrl
-	ldr	r5, [r4]
-	tst	r5, #0x4
-	bxne	lr			@ Return if locked
-	/* wait till dll locks */
-	adr	r7, kick_counter
-wait_dll_lock_timed:
-	ldr	r4, wait_dll_lock_counter
-	add	r4, r4, #1
-	str	r4, [r7, #wait_dll_lock_counter - kick_counter]
-	ldr	r4, sdrc_dlla_status
-	/* Wait 20uS for lock */
-	mov	r6, #8
-wait_dll_lock:
-	subs	r6, r6, #0x1
-	beq	kick_dll
-	ldr	r5, [r4]
-	and	r5, r5, #0x4
-	cmp	r5, #0x4
-	bne	wait_dll_lock
-	bx	lr			@ Return when locked
-
-	/* disable/reenable DLL if not locked */
-kick_dll:
-	ldr	r4, sdrc_dlla_ctrl
-	ldr	r5, [r4]
-	mov	r6, r5
-	bic	r6, #(1<<3)		@ disable dll
-	str	r6, [r4]
-	dsb
-	orr	r6, r6, #(1<<3)		@ enable dll
-	str	r6, [r4]
-	dsb
-	ldr	r4, kick_counter
-	add	r4, r4, #1
-	str	r4, [r7]		@ kick_counter
-	b	wait_dll_lock_timed
-
-	.align
-cm_idlest1_core:
-	.word	CM_IDLEST1_CORE_V
-cm_idlest_ckgen:
-	.word	CM_IDLEST_CKGEN_V
-sdrc_dlla_status:
-	.word	SDRC_DLLA_STATUS_V
-sdrc_dlla_ctrl:
-	.word	SDRC_DLLA_CTRL_V
-pm_prepwstst_core_p:
-	.word	PM_PREPWSTST_CORE_P
-pm_pwstctrl_mpu:
-	.word	PM_PWSTCTRL_MPU_P
-scratchpad_base:
-	.word	SCRATCHPAD_BASE_P
-sram_base:
-	.word	SRAM_BASE_P + 0x8000
-sdrc_power:
-	.word	SDRC_POWER_V
-ttbrbit_mask:
-	.word	0xFFFFC000
-table_index_mask:
-	.word	0xFFF00000
-table_entry:
-	.word	0x00000C02
-cache_pred_disable_mask:
-	.word	0xFFFFE7FB
-control_stat:
-	.word	CONTROL_STAT
-control_mem_rta:
-	.word	CONTROL_MEM_RTA_CTRL
-kernel_flush:
-	.word	v7_flush_dcache_all
-l2dis_3630:
-	.word	0
-/*
- * When exporting to userspace while the counters are in SRAM,
- * these 2 words need to be at the end to facilitate retrival!
- */
-kick_counter:
-	.word	0
-wait_dll_lock_counter:
-	.word	0
-ENDPROC(omap34xx_cpu_suspend)
-
-ENTRY(omap34xx_cpu_suspend_sz)
-	.word	. - omap34xx_cpu_suspend
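For readers tracing how the renamed symbols are consumed, below is a minimal C-side sketch implied by the new comments (cf. omap_push_sram_idle and omap3_do_wfi_sram). The companion pm34xx.c change is outside this diffstat-limited view, so the function bodies, the use of the omap_sram_push() helper, and the header location are assumptions for illustration, not code from this patch.

	/* Hypothetical sketch only -- not part of this diff. */
	#include <plat/sram.h>		/* omap_sram_push(); header path assumed */

	extern void omap3_do_wfi(void);			/* SDRAM copy, exported above */
	extern unsigned int omap3_do_wfi_sz;		/* size word, exported above */
	extern void omap34xx_cpu_suspend(int save_state);

	/* Referenced by the assembly via omap3_do_wfi_sram_addr, so not static */
	void (*omap3_do_wfi_sram)(void);

	/* Copy only the minimum WFI/resume code to internal SRAM */
	void omap_push_sram_idle(void)
	{
		omap3_do_wfi_sram = omap_sram_push(omap3_do_wfi, omap3_do_wfi_sz);
	}

	/*
	 * save_state follows the r0 convention documented in omap34xx_cpu_suspend:
	 * 0 means no context is lost, so the assembly branches straight to the
	 * SRAM copy; non-zero saves context and runs omap3_do_wfi from SDRAM.
	 */
	static void omap_sram_idle(int save_state)
	{
		omap34xx_cpu_suspend(save_state);
	}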