Diffstat (limited to 'arch/arm/mach-omap2/sleep34xx.S')
-rw-r--r--  arch/arm/mach-omap2/sleep34xx.S  282
1 file changed, 133 insertions(+), 149 deletions(-)
diff --git a/arch/arm/mach-omap2/sleep34xx.S b/arch/arm/mach-omap2/sleep34xx.S
index 951a0be66cf7..63f10669571a 100644
--- a/arch/arm/mach-omap2/sleep34xx.S
+++ b/arch/arm/mach-omap2/sleep34xx.S
@@ -64,6 +64,11 @@
64#define SDRC_DLLA_STATUS_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS) 64#define SDRC_DLLA_STATUS_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
65#define SDRC_DLLA_CTRL_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL) 65#define SDRC_DLLA_CTRL_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)
66 66
67/*
68 * This file needs to be built unconditionally as ARM to interoperate correctly
69 * with non-Thumb-2-capable firmware.
70 */
71 .arm
67 72
68/* 73/*
69 * API functions 74 * API functions
@@ -82,6 +87,8 @@ ENTRY(get_restore_pointer)
82 stmfd sp!, {lr} @ save registers on stack 87 stmfd sp!, {lr} @ save registers on stack
83 adr r0, restore 88 adr r0, restore
84 ldmfd sp!, {pc} @ restore regs and return 89 ldmfd sp!, {pc} @ restore regs and return
90ENDPROC(get_restore_pointer)
91 .align
85ENTRY(get_restore_pointer_sz) 92ENTRY(get_restore_pointer_sz)
86 .word . - get_restore_pointer 93 .word . - get_restore_pointer
87 94
@@ -91,6 +98,8 @@ ENTRY(get_omap3630_restore_pointer)
91 stmfd sp!, {lr} @ save registers on stack 98 stmfd sp!, {lr} @ save registers on stack
92 adr r0, restore_3630 99 adr r0, restore_3630
93 ldmfd sp!, {pc} @ restore regs and return 100 ldmfd sp!, {pc} @ restore regs and return
101ENDPROC(get_omap3630_restore_pointer)
102 .align
94ENTRY(get_omap3630_restore_pointer_sz) 103ENTRY(get_omap3630_restore_pointer_sz)
95 .word . - get_omap3630_restore_pointer 104 .word . - get_omap3630_restore_pointer
96 105
@@ -100,6 +109,8 @@ ENTRY(get_es3_restore_pointer)
100 stmfd sp!, {lr} @ save registers on stack 109 stmfd sp!, {lr} @ save registers on stack
101 adr r0, restore_es3 110 adr r0, restore_es3
102 ldmfd sp!, {pc} @ restore regs and return 111 ldmfd sp!, {pc} @ restore regs and return
112ENDPROC(get_es3_restore_pointer)
113 .align
103ENTRY(get_es3_restore_pointer_sz) 114ENTRY(get_es3_restore_pointer_sz)
104 .word . - get_es3_restore_pointer 115 .word . - get_es3_restore_pointer
105 116
@@ -113,8 +124,10 @@ ENTRY(enable_omap3630_toggle_l2_on_restore)
113 stmfd sp!, {lr} @ save registers on stack 124 stmfd sp!, {lr} @ save registers on stack
114 /* Setup so that we will disable and enable l2 */ 125 /* Setup so that we will disable and enable l2 */
115 mov r1, #0x1 126 mov r1, #0x1
116 str r1, l2dis_3630 127 adrl r2, l2dis_3630 @ may be too distant for plain adr
128 str r1, [r2]
117 ldmfd sp!, {pc} @ restore regs and return 129 ldmfd sp!, {pc} @ restore regs and return
130ENDPROC(enable_omap3630_toggle_l2_on_restore)
118 131
119 .text 132 .text
120/* Function to call rom code to save secure ram context */ 133/* Function to call rom code to save secure ram context */
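A quick note on the adrl used in the hunk above: plain adr assembles to a single ADD/SUB of the pc with an 8-bit rotated immediate, so it can only reach labels a short distance away, whereas adrl is a gas pseudo-instruction (ARM state only, which the file-wide .arm guarantees here) that expands to two instructions for longer reach. A rough sketch of what the assembler emits, assuming a forward reference:

	adrl	r2, l2dis_3630		@ expands to roughly:
	@	add	r2, pc, #imm_lo	@ low part of the pc-relative offset
	@	add	r2, r2, #imm_hi	@ high part of the pc-relative offset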
@@ -132,20 +145,22 @@ ENTRY(save_secure_ram_context)
132 mov r1, #0 @ set task id for ROM code in r1 145 mov r1, #0 @ set task id for ROM code in r1
133 mov r2, #4 @ set some flags in r2, r6 146 mov r2, #4 @ set some flags in r2, r6
134 mov r6, #0xff 147 mov r6, #0xff
135 mcr p15, 0, r0, c7, c10, 4 @ data write barrier 148 dsb @ data write barrier
136 mcr p15, 0, r0, c7, c10, 5 @ data memory barrier 149 dmb @ data memory barrier
137 .word 0xE1600071 @ call SMI monitor (smi #1) 150 smc #1 @ call SMI monitor (smi #1)
138 nop 151 nop
139 nop 152 nop
140 nop 153 nop
141 nop 154 nop
142 ldmfd sp!, {r1-r12, pc} 155 ldmfd sp!, {r1-r12, pc}
156 .align
143sram_phy_addr_mask: 157sram_phy_addr_mask:
144 .word SRAM_BASE_P 158 .word SRAM_BASE_P
145high_mask: 159high_mask:
146 .word 0xffff 160 .word 0xffff
147api_params: 161api_params:
148 .word 0x4, 0x0, 0x0, 0x1, 0x1 162 .word 0x4, 0x0, 0x0, 0x1, 0x1
163ENDPROC(save_secure_ram_context)
149ENTRY(save_secure_ram_context_sz) 164ENTRY(save_secure_ram_context_sz)
150 .word . - save_secure_ram_context 165 .word . - save_secure_ram_context
151 166
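For context on the .word 0xE1600071 -> smc #1 conversions made throughout this patch: 0xE1600071 is the hand-assembled ARM (condition AL) encoding of SMC #1, and 0xE1600070 of SMC #0, so the raw words only decode as intended when executed in ARM state. Using the mnemonic together with the file-wide .arm makes that explicit. A minimal sketch, assuming an assembler/flags combination that accepts the smc mnemonic (older binutils may need the security extension enabled):

	.arm
	smc	#1	@ emits 0xE1600071: trap to the secure monitor, call 1
	smc	#0	@ emits 0xE1600070: trap to the secure monitor, call 0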
@@ -175,12 +190,12 @@ ENTRY(omap34xx_cpu_suspend)
175 stmfd sp!, {r0-r12, lr} @ save registers on stack 190 stmfd sp!, {r0-r12, lr} @ save registers on stack
176 191
177 /* 192 /*
178 * r0 contains restore pointer in sdram 193 * r0 contains CPU context save/restore pointer in sdram
179 * r1 contains information about saving context: 194 * r1 contains information about saving context:
180 * 0 - No context lost 195 * 0 - No context lost
181 * 1 - Only L1 and logic lost 196 * 1 - Only L1 and logic lost
182 * 2 - Only L2 lost 197 * 2 - Only L2 lost (Even if L1 is retained, we clean it along with L2)
183 * 3 - Both L1 and L2 lost 198 * 3 - Both L1 and L2 lost and logic lost
184 */ 199 */
185 200
186 /* Directly jump to WFI if the context save is not required */ 201
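The r0/r1 contract documented above is what the platform PM code has to honour when it branches into the SRAM copy of this routine. A hypothetical caller sketch, purely for illustration (the label and the register assumed to hold the SRAM address are made up, not taken from this patch):

	ldr	r0, =arm_context_save_area	@ hypothetical: save/restore area in SDRAM
	mov	r1, #3				@ worst case: L1, L2 and logic all lost
	blx	r4				@ r4 assumed to hold the SRAM copy of omap34xx_cpu_suspend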
@@ -201,89 +216,74 @@ save_context_wfi:
201 beq clean_caches 216 beq clean_caches
202 217
203l1_logic_lost: 218l1_logic_lost:
204 /* Store sp and spsr to SDRAM */ 219 mov r4, sp @ Store sp
205 mov r4, sp 220 mrs r5, spsr @ Store spsr
206 mrs r5, spsr 221 mov r6, lr @ Store lr
207 mov r6, lr
208 stmia r8!, {r4-r6} 222 stmia r8!, {r4-r6}
209 /* Save all ARM registers */ 223
210 /* Coprocessor access control register */ 224 mrc p15, 0, r4, c1, c0, 2 @ Coprocessor access control register
211 mrc p15, 0, r6, c1, c0, 2 225 mrc p15, 0, r5, c2, c0, 0 @ TTBR0
212 stmia r8!, {r6} 226 mrc p15, 0, r6, c2, c0, 1 @ TTBR1
213 /* TTBR0, TTBR1 and Translation table base control */ 227 mrc p15, 0, r7, c2, c0, 2 @ TTBCR
214 mrc p15, 0, r4, c2, c0, 0
215 mrc p15, 0, r5, c2, c0, 1
216 mrc p15, 0, r6, c2, c0, 2
217 stmia r8!, {r4-r6}
218 /*
219 * Domain access control register, data fault status register,
220 * and instruction fault status register
221 */
222 mrc p15, 0, r4, c3, c0, 0
223 mrc p15, 0, r5, c5, c0, 0
224 mrc p15, 0, r6, c5, c0, 1
225 stmia r8!, {r4-r6}
226 /*
227 * Data aux fault status register, instruction aux fault status,
228 * data fault address register and instruction fault address register
229 */
230 mrc p15, 0, r4, c5, c1, 0
231 mrc p15, 0, r5, c5, c1, 1
232 mrc p15, 0, r6, c6, c0, 0
233 mrc p15, 0, r7, c6, c0, 2
234 stmia r8!, {r4-r7}
235 /*
236 * user r/w thread and process ID, user r/o thread and process ID,
237 * priv only thread and process ID, cache size selection
238 */
239 mrc p15, 0, r4, c13, c0, 2
240 mrc p15, 0, r5, c13, c0, 3
241 mrc p15, 0, r6, c13, c0, 4
242 mrc p15, 2, r7, c0, c0, 0
243 stmia r8!, {r4-r7} 228 stmia r8!, {r4-r7}
244 /* Data TLB lockdown, instruction TLB lockdown registers */
245 mrc p15, 0, r5, c10, c0, 0
246 mrc p15, 0, r6, c10, c0, 1
247 stmia r8!, {r5-r6}
248 /* Secure or non secure vector base address, FCSE PID, Context PID*/
249 mrc p15, 0, r4, c12, c0, 0
250 mrc p15, 0, r5, c13, c0, 0
251 mrc p15, 0, r6, c13, c0, 1
252 stmia r8!, {r4-r6}
253 /* Primary remap, normal remap registers */
254 mrc p15, 0, r4, c10, c2, 0
255 mrc p15, 0, r5, c10, c2, 1
256 stmia r8!,{r4-r5}
257 229
258 /* Store current cpsr*/ 230 mrc p15, 0, r4, c3, c0, 0 @ Domain access Control Register
259 mrs r2, cpsr 231 mrc p15, 0, r5, c10, c2, 0 @ PRRR
260 stmia r8!, {r2} 232 mrc p15, 0, r6, c10, c2, 1 @ NMRR
233 stmia r8!,{r4-r6}
261 234
262 mrc p15, 0, r4, c1, c0, 0 235 mrc p15, 0, r4, c13, c0, 1 @ Context ID
263 /* save control register */ 236 mrc p15, 0, r5, c13, c0, 2 @ User r/w thread and process ID
237 mrc p15, 0, r6, c12, c0, 0 @ Secure or NS vector base address
238 mrs r7, cpsr @ Store current cpsr
239 stmia r8!, {r4-r7}
240
241 mrc p15, 0, r4, c1, c0, 0 @ save control register
264 stmia r8!, {r4} 242 stmia r8!, {r4}
265 243
266clean_caches: 244clean_caches:
267 /* 245 /*
268 * Clean Data or unified cache to POU
269 * How to invalidate only L1 cache???? - #FIX_ME#
270 * mcr p15, 0, r11, c7, c11, 1
271 */
272 cmp r1, #0x1 @ Check whether L2 inval is required
273 beq omap3_do_wfi
274
275clean_l2:
276 /*
277 * jump out to kernel flush routine 246 * jump out to kernel flush routine
278 * - reuse that code is better 247 * - reuse that code is better
279 * - it executes in a cached space so is faster than refetch per-block 248 * - it executes in a cached space so is faster than refetch per-block
280 * - should be faster and will change with kernel 249 * - should be faster and will change with kernel
281 * - 'might' have to copy address, load and jump to it 250 * - 'might' have to copy address, load and jump to it
251 * Flush all data from the L1 data cache before disabling
252 * the SCTLR.C bit.
282 */ 253 */
283 ldr r1, kernel_flush 254 ldr r1, kernel_flush
284 mov lr, pc 255 mov lr, pc
285 bx r1 256 bx r1
286 257
258 /*
259 * Clear the SCTLR.C bit to prevent further data cache
260 * allocation. Clearing SCTLR.C makes all data accesses
261 * strongly ordered, so they no longer hit the cache.
262 */
263 mrc p15, 0, r0, c1, c0, 0
264 bic r0, r0, #(1 << 2) @ Disable the C bit
265 mcr p15, 0, r0, c1, c0, 0
266 isb
267
268 /*
269 * Invalidate the L1 data cache. Even though only an invalidate is
270 * necessary, the exported flush API is used here. Doing a clean
271 * on an already clean cache is almost a NOP.
272 */
273 ldr r1, kernel_flush
274 blx r1
275 /*
276 * The kernel doesn't interwork: v7_flush_dcache_all in particular will
277 * always return in Thumb state when CONFIG_THUMB2_KERNEL is enabled.
278 * This sequence switches back to ARM. Note that .align may insert a
279 * nop: bx pc needs to be word-aligned in order to work.
280 */
281 THUMB( .thumb )
282 THUMB( .align )
283 THUMB( bx pc )
284 THUMB( nop )
285 .arm
286
287omap3_do_wfi: 287omap3_do_wfi:
288 ldr r4, sdrc_power @ read the SDRC_POWER register 288 ldr r4, sdrc_power @ read the SDRC_POWER register
289 ldr r5, [r4] @ read the contents of SDRC_POWER 289 ldr r5, [r4] @ read the contents of SDRC_POWER
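The THUMB( bx pc ) sequence a few lines up relies on two architectural details: in Thumb state the pc reads as the address of the current instruction plus 4, and bx to an address with bit 0 clear continues execution in ARM state. That is also why the .align matters: the jump only lands on the intended ARM code if the pc value is word-aligned. A stand-alone sketch of the same idiom, illustrative only and not additional patch content:

	.thumb
	.align	2	@ place bx pc at a word-aligned address
	bx	pc	@ pc = this insn + 4, bit 0 clear -> switch to ARM state
	nop		@ padding only; execution resumes past it, in ARM
	.arm
	@ ARM-only code continues here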
@@ -291,9 +291,8 @@ omap3_do_wfi:
291 str r5, [r4] @ write back to SDRC_POWER register 291 str r5, [r4] @ write back to SDRC_POWER register
292 292
293 /* Data memory barrier and Data sync barrier */ 293 /* Data memory barrier and Data sync barrier */
294 mov r1, #0 294 dsb
295 mcr p15, 0, r1, c7, c10, 4 295 dmb
296 mcr p15, 0, r1, c7, c10, 5
297 296
298/* 297/*
299 * =================================== 298 * ===================================
@@ -319,6 +318,12 @@ omap3_do_wfi:
319 nop 318 nop
320 bl wait_sdrc_ok 319 bl wait_sdrc_ok
321 320
321 mrc p15, 0, r0, c1, c0, 0
322 tst r0, #(1 << 2) @ Check C bit enabled?
323 orreq r0, r0, #(1 << 2) @ Enable the C bit if cleared
324 mcreq p15, 0, r0, c1, c0, 0
325 isb
326
322/* 327/*
323 * =================================== 328 * ===================================
324 * == Exit point from non-OFF modes == 329 * == Exit point from non-OFF modes ==
@@ -408,9 +413,9 @@ skipl2dis:
408 mov r2, #4 @ set some flags in r2, r6 413 mov r2, #4 @ set some flags in r2, r6
409 mov r6, #0xff 414 mov r6, #0xff
410 adr r3, l2_inv_api_params @ r3 points to dummy parameters 415 adr r3, l2_inv_api_params @ r3 points to dummy parameters
411 mcr p15, 0, r0, c7, c10, 4 @ data write barrier 416 dsb @ data write barrier
412 mcr p15, 0, r0, c7, c10, 5 @ data memory barrier 417 dmb @ data memory barrier
413 .word 0xE1600071 @ call SMI monitor (smi #1) 418 smc #1 @ call SMI monitor (smi #1)
414 /* Write to Aux control register to set some bits */ 419 /* Write to Aux control register to set some bits */
415 mov r0, #42 @ set service ID for PPA 420 mov r0, #42 @ set service ID for PPA
416 mov r12, r0 @ copy secure Service ID in r12 421 mov r12, r0 @ copy secure Service ID in r12
@@ -419,9 +424,9 @@ skipl2dis:
419 mov r6, #0xff 424 mov r6, #0xff
420 ldr r4, scratchpad_base 425 ldr r4, scratchpad_base
421 ldr r3, [r4, #0xBC] @ r3 points to parameters 426 ldr r3, [r4, #0xBC] @ r3 points to parameters
422 mcr p15, 0, r0, c7, c10, 4 @ data write barrier 427 dsb @ data write barrier
423 mcr p15, 0, r0, c7, c10, 5 @ data memory barrier 428 dmb @ data memory barrier
424 .word 0xE1600071 @ call SMI monitor (smi #1) 429 smc #1 @ call SMI monitor (smi #1)
425 430
426#ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE 431#ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE
427 /* Restore L2 aux control register */ 432 /* Restore L2 aux control register */
@@ -434,29 +439,30 @@ skipl2dis:
434 ldr r4, scratchpad_base 439 ldr r4, scratchpad_base
435 ldr r3, [r4, #0xBC] 440 ldr r3, [r4, #0xBC]
436 adds r3, r3, #8 @ r3 points to parameters 441 adds r3, r3, #8 @ r3 points to parameters
437 mcr p15, 0, r0, c7, c10, 4 @ data write barrier 442 dsb @ data write barrier
438 mcr p15, 0, r0, c7, c10, 5 @ data memory barrier 443 dmb @ data memory barrier
439 .word 0xE1600071 @ call SMI monitor (smi #1) 444 smc #1 @ call SMI monitor (smi #1)
440#endif 445#endif
441 b logic_l1_restore 446 b logic_l1_restore
442 447
448 .align
443l2_inv_api_params: 449l2_inv_api_params:
444 .word 0x1, 0x00 450 .word 0x1, 0x00
445l2_inv_gp: 451l2_inv_gp:
446 /* Execute smi to invalidate L2 cache */ 452 /* Execute smi to invalidate L2 cache */
447 mov r12, #0x1 @ set up to invalidate L2 453 mov r12, #0x1 @ set up to invalidate L2
448 .word 0xE1600070 @ Call SMI monitor (smieq) 454 smc #0 @ Call SMI monitor (smieq)
449 /* Write to Aux control register to set some bits */ 455 /* Write to Aux control register to set some bits */
450 ldr r4, scratchpad_base 456 ldr r4, scratchpad_base
451 ldr r3, [r4,#0xBC] 457 ldr r3, [r4,#0xBC]
452 ldr r0, [r3,#4] 458 ldr r0, [r3,#4]
453 mov r12, #0x3 459 mov r12, #0x3
454 .word 0xE1600070 @ Call SMI monitor (smieq) 460 smc #0 @ Call SMI monitor (smieq)
455 ldr r4, scratchpad_base 461 ldr r4, scratchpad_base
456 ldr r3, [r4,#0xBC] 462 ldr r3, [r4,#0xBC]
457 ldr r0, [r3,#12] 463 ldr r0, [r3,#12]
458 mov r12, #0x2 464 mov r12, #0x2
459 .word 0xE1600070 @ Call SMI monitor (smieq) 465 smc #0 @ Call SMI monitor (smieq)
460logic_l1_restore: 466logic_l1_restore:
461 ldr r1, l2dis_3630 467 ldr r1, l2dis_3630
462 cmp r1, #0x1 @ Test if L2 re-enable needed on 3630 468 cmp r1, #0x1 @ Test if L2 re-enable needed on 3630
@@ -475,68 +481,29 @@ skipl2reen:
475 ldr r4, scratchpad_base 481 ldr r4, scratchpad_base
476 ldr r3, [r4,#0xBC] 482 ldr r3, [r4,#0xBC]
477 adds r3, r3, #16 483 adds r3, r3, #16
484
478 ldmia r3!, {r4-r6} 485 ldmia r3!, {r4-r6}
479 mov sp, r4 486 mov sp, r4 @ Restore sp
480 msr spsr_cxsf, r5 487 msr spsr_cxsf, r5 @ Restore spsr
481 mov lr, r6 488 mov lr, r6 @ Restore lr
482 489
483 ldmia r3!, {r4-r9} 490 ldmia r3!, {r4-r7}
484 /* Coprocessor access Control Register */ 491 mcr p15, 0, r4, c1, c0, 2 @ Coprocessor access Control Register
485 mcr p15, 0, r4, c1, c0, 2 492 mcr p15, 0, r5, c2, c0, 0 @ TTBR0
486 493 mcr p15, 0, r6, c2, c0, 1 @ TTBR1
487 /* TTBR0 */ 494 mcr p15, 0, r7, c2, c0, 2 @ TTBCR
488 MCR p15, 0, r5, c2, c0, 0
489 /* TTBR1 */
490 MCR p15, 0, r6, c2, c0, 1
491 /* Translation table base control register */
492 MCR p15, 0, r7, c2, c0, 2
493 /* Domain access Control Register */
494 MCR p15, 0, r8, c3, c0, 0
495 /* Data fault status Register */
496 MCR p15, 0, r9, c5, c0, 0
497
498 ldmia r3!,{r4-r8}
499 /* Instruction fault status Register */
500 MCR p15, 0, r4, c5, c0, 1
501 /* Data Auxiliary Fault Status Register */
502 MCR p15, 0, r5, c5, c1, 0
503 /* Instruction Auxiliary Fault Status Register*/
504 MCR p15, 0, r6, c5, c1, 1
505 /* Data Fault Address Register */
506 MCR p15, 0, r7, c6, c0, 0
507 /* Instruction Fault Address Register*/
508 MCR p15, 0, r8, c6, c0, 2
509 ldmia r3!,{r4-r7}
510 495
511 /* User r/w thread and process ID */ 496 ldmia r3!,{r4-r6}
512 MCR p15, 0, r4, c13, c0, 2 497 mcr p15, 0, r4, c3, c0, 0 @ Domain access Control Register
513 /* User ro thread and process ID */ 498 mcr p15, 0, r5, c10, c2, 0 @ PRRR
514 MCR p15, 0, r5, c13, c0, 3 499 mcr p15, 0, r6, c10, c2, 1 @ NMRR
515 /* Privileged only thread and process ID */ 500
516 MCR p15, 0, r6, c13, c0, 4 501
517 /* Cache size selection */ 502 ldmia r3!,{r4-r7}
518 MCR p15, 2, r7, c0, c0, 0 503 mcr p15, 0, r4, c13, c0, 1 @ Context ID
519 ldmia r3!,{r4-r8} 504 mcr p15, 0, r5, c13, c0, 2 @ User r/w thread and process ID
520 /* Data TLB lockdown registers */ 505 mrc p15, 0, r6, c12, c0, 0 @ Secure or NS vector base address
521 MCR p15, 0, r4, c10, c0, 0 506 msr cpsr, r7 @ store cpsr
522 /* Instruction TLB lockdown registers */
523 MCR p15, 0, r5, c10, c0, 1
524 /* Secure or Nonsecure Vector Base Address */
525 MCR p15, 0, r6, c12, c0, 0
526 /* FCSE PID */
527 MCR p15, 0, r7, c13, c0, 0
528 /* Context PID */
529 MCR p15, 0, r8, c13, c0, 1
530
531 ldmia r3!,{r4-r5}
532 /* Primary memory remap register */
533 MCR p15, 0, r4, c10, c2, 0
534 /* Normal memory remap register */
535 MCR p15, 0, r5, c10, c2, 1
536
537 /* Restore cpsr */
538 ldmia r3!,{r4} @ load CPSR from SDRAM
539 msr cpsr, r4 @ store cpsr
540 507
541 /* Enabling MMU here */ 508 /* Enabling MMU here */
542 mrc p15, 0, r7, c2, c0, 2 @ Read TTBRControl 509 mrc p15, 0, r7, c2, c0, 2 @ Read TTBRControl
@@ -594,12 +561,17 @@ usettbr0:
594 ldr r2, cache_pred_disable_mask 561 ldr r2, cache_pred_disable_mask
595 and r4, r2 562 and r4, r2
596 mcr p15, 0, r4, c1, c0, 0 563 mcr p15, 0, r4, c1, c0, 0
564 dsb
565 isb
566 ldr r0, =restoremmu_on
567 bx r0
597 568
598/* 569/*
599 * ============================== 570 * ==============================
600 * == Exit point from OFF mode == 571 * == Exit point from OFF mode ==
601 * ============================== 572 * ==============================
602 */ 573 */
574restoremmu_on:
603 ldmfd sp!, {r0-r12, pc} @ restore regs and return 575 ldmfd sp!, {r0-r12, pc} @ restore regs and return
604 576
605 577
@@ -609,6 +581,7 @@ usettbr0:
609 581
610/* This function implements the erratum ID i443 WA, applies to 34xx >= ES3.0 */ 582/* This function implements the erratum ID i443 WA, applies to 34xx >= ES3.0 */
611 .text 583 .text
584 .align 3
612ENTRY(es3_sdrc_fix) 585ENTRY(es3_sdrc_fix)
613 ldr r4, sdrc_syscfg @ get config addr 586 ldr r4, sdrc_syscfg @ get config addr
614 ldr r5, [r4] @ get value 587 ldr r5, [r4] @ get value
@@ -636,6 +609,7 @@ ENTRY(es3_sdrc_fix)
636 str r5, [r4] @ kick off refreshes 609 str r5, [r4] @ kick off refreshes
637 bx lr 610 bx lr
638 611
612 .align
639sdrc_syscfg: 613sdrc_syscfg:
640 .word SDRC_SYSCONFIG_P 614 .word SDRC_SYSCONFIG_P
641sdrc_mr_0: 615sdrc_mr_0:
@@ -650,6 +624,7 @@ sdrc_emr2_1:
650 .word SDRC_EMR2_1_P 624 .word SDRC_EMR2_1_P
651sdrc_manual_1: 625sdrc_manual_1:
652 .word SDRC_MANUAL_1_P 626 .word SDRC_MANUAL_1_P
627ENDPROC(es3_sdrc_fix)
653ENTRY(es3_sdrc_fix_sz) 628ENTRY(es3_sdrc_fix_sz)
654 .word . - es3_sdrc_fix 629 .word . - es3_sdrc_fix
655 630
@@ -684,6 +659,12 @@ wait_sdrc_ready:
684 bic r5, r5, #0x40 659 bic r5, r5, #0x40
685 str r5, [r4] 660 str r5, [r4]
686 661
662/*
663 * PC-relative stores lead to undefined behaviour in Thumb-2: use r7 as a
664 * base instead.
665 * Be careful not to clobber r7 when maintaining this code.
666 */
667
687is_dll_in_lock_mode: 668is_dll_in_lock_mode:
688 /* Is dll in lock mode? */ 669 /* Is dll in lock mode? */
689 ldr r4, sdrc_dlla_ctrl 670 ldr r4, sdrc_dlla_ctrl
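To make the comment above concrete: in ARM state gas will assemble a store to a nearby label as a pc-relative str, but as the comment notes that form is not safe in Thumb-2, so the address is materialised in a register once and plain register-base stores are used after that. A minimal before/after sketch reusing the labels from the code below, for illustration only:

	@ ARM-only form the old code relied on:
	@	str	r4, kick_counter	@ pc-relative store
	@ state-agnostic replacement:
	adr	r7, kick_counter		@ take the address once
	str	r4, [r7]			@ ordinary register-base store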
@@ -691,10 +672,11 @@ is_dll_in_lock_mode:
691 tst r5, #0x4 672 tst r5, #0x4
692 bxne lr @ Return if locked 673 bxne lr @ Return if locked
693 /* wait till dll locks */ 674 /* wait till dll locks */
675 adr r7, kick_counter
694wait_dll_lock_timed: 676wait_dll_lock_timed:
695 ldr r4, wait_dll_lock_counter 677 ldr r4, wait_dll_lock_counter
696 add r4, r4, #1 678 add r4, r4, #1
697 str r4, wait_dll_lock_counter 679 str r4, [r7, #wait_dll_lock_counter - kick_counter]
698 ldr r4, sdrc_dlla_status 680 ldr r4, sdrc_dlla_status
699 /* Wait 20uS for lock */ 681 /* Wait 20uS for lock */
700 mov r6, #8 682 mov r6, #8
@@ -720,9 +702,10 @@ kick_dll:
720 dsb 702 dsb
721 ldr r4, kick_counter 703 ldr r4, kick_counter
722 add r4, r4, #1 704 add r4, r4, #1
723 str r4, kick_counter 705 str r4, [r7] @ kick_counter
724 b wait_dll_lock_timed 706 b wait_dll_lock_timed
725 707
708 .align
726cm_idlest1_core: 709cm_idlest1_core:
727 .word CM_IDLEST1_CORE_V 710 .word CM_IDLEST1_CORE_V
728cm_idlest_ckgen: 711cm_idlest_ckgen:
@@ -765,6 +748,7 @@ kick_counter:
765 .word 0 748 .word 0
766wait_dll_lock_counter: 749wait_dll_lock_counter:
767 .word 0 750 .word 0
751ENDPROC(omap34xx_cpu_suspend)
768 752
769ENTRY(omap34xx_cpu_suspend_sz) 753ENTRY(omap34xx_cpu_suspend_sz)
770 .word . - omap34xx_cpu_suspend 754 .word . - omap34xx_cpu_suspend
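Finally, a note on the recurring *_sz words and on why the ENDPROC/.align placement in this patch matters: these routines are copied into SRAM at runtime and executed from the copy, so each exported blob is followed by a word holding its size, and everything the blob needs, including its literal words, has to sit word-aligned inside that measured region. A generic sketch of the pattern, with a made-up name (my_sram_fn) and assuming the usual linux/linkage.h ENTRY/ENDPROC macros:

ENTRY(my_sram_fn)
	@ position-independent code that the PM layer copies into SRAM
	bx	lr
	.align				@ keep the trailing literal word-aligned
my_literal:
	.word	0x12345678		@ data referenced by the routine, kept inside the blob
ENDPROC(my_sram_fn)
ENTRY(my_sram_fn_sz)
	.word	. - my_sram_fn		@ size used when copying the blob to SRAM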