Diffstat (limited to 'arch/arm/mach-omap2/sleep34xx.S')
-rw-r--r-- | arch/arm/mach-omap2/sleep34xx.S | 836 |
1 file changed, 466 insertions, 370 deletions
diff --git a/arch/arm/mach-omap2/sleep34xx.S b/arch/arm/mach-omap2/sleep34xx.S
index 2fb205a7f285..98d8232808b8 100644
--- a/arch/arm/mach-omap2/sleep34xx.S
+++ b/arch/arm/mach-omap2/sleep34xx.S
@@ -1,6 +1,4 @@ | |||
1 | /* | 1 | /* |
2 | * linux/arch/arm/mach-omap2/sleep.S | ||
3 | * | ||
4 | * (C) Copyright 2007 | 2 | * (C) Copyright 2007 |
5 | * Texas Instruments | 3 | * Texas Instruments |
6 | * Karthik Dasu <karthik-dp@ti.com> | 4 | * Karthik Dasu <karthik-dp@ti.com> |
@@ -26,28 +24,35 @@ | |||
26 | */ | 24 | */ |
27 | #include <linux/linkage.h> | 25 | #include <linux/linkage.h> |
28 | #include <asm/assembler.h> | 26 | #include <asm/assembler.h> |
27 | #include <plat/sram.h> | ||
29 | #include <mach/io.h> | 28 | #include <mach/io.h> |
30 | 29 | ||
31 | #include "cm.h" | 30 | #include "cm2xxx_3xxx.h" |
32 | #include "prm.h" | 31 | #include "prm2xxx_3xxx.h" |
33 | #include "sdrc.h" | 32 | #include "sdrc.h" |
34 | #include "control.h" | 33 | #include "control.h" |
35 | 34 | ||
36 | #define SDRC_SCRATCHPAD_SEM_V 0xfa00291c | 35 | /* |
37 | 36 | * Register access definitions | |
38 | #define PM_PREPWSTST_CORE_V OMAP34XX_PRM_REGADDR(CORE_MOD, \ | 37 | */ |
39 | OMAP3430_PM_PREPWSTST) | 38 | #define SDRC_SCRATCHPAD_SEM_OFFS 0xc |
40 | #define PM_PREPWSTST_CORE_P 0x48306AE8 | 39 | #define SDRC_SCRATCHPAD_SEM_V OMAP343X_SCRATCHPAD_REGADDR\ |
41 | #define PM_PREPWSTST_MPU_V OMAP34XX_PRM_REGADDR(MPU_MOD, \ | 40 | (SDRC_SCRATCHPAD_SEM_OFFS) |
42 | OMAP3430_PM_PREPWSTST) | 41 | #define PM_PREPWSTST_CORE_P OMAP3430_PRM_BASE + CORE_MOD +\ |
42 | OMAP3430_PM_PREPWSTST | ||
43 | #define PM_PWSTCTRL_MPU_P OMAP3430_PRM_BASE + MPU_MOD + OMAP2_PM_PWSTCTRL | 43 | #define PM_PWSTCTRL_MPU_P OMAP3430_PRM_BASE + MPU_MOD + OMAP2_PM_PWSTCTRL |
44 | #define CM_IDLEST1_CORE_V OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1) | 44 | #define CM_IDLEST1_CORE_V OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1) |
45 | #define SRAM_BASE_P 0x40200000 | 45 | #define CM_IDLEST_CKGEN_V OMAP34XX_CM_REGADDR(PLL_MOD, CM_IDLEST) |
46 | #define CONTROL_STAT 0x480022F0 | 46 | #define SRAM_BASE_P OMAP3_SRAM_PA |
47 | #define SCRATCHPAD_MEM_OFFS 0x310 /* Move this as correct place is | 47 | #define CONTROL_STAT OMAP343X_CTRL_BASE + OMAP343X_CONTROL_STATUS |
48 | * available */ | 48 | #define CONTROL_MEM_RTA_CTRL (OMAP343X_CTRL_BASE +\ |
49 | #define SCRATCHPAD_BASE_P (OMAP343X_CTRL_BASE + OMAP343X_CONTROL_MEM_WKUP\ | 49 | OMAP36XX_CONTROL_MEM_RTA_CTRL) |
50 | + SCRATCHPAD_MEM_OFFS) | 50 | |
51 | /* Move this when a correct place is available */ | ||
52 | #define SCRATCHPAD_MEM_OFFS 0x310 | ||
53 | #define SCRATCHPAD_BASE_P (OMAP343X_CTRL_BASE +\ | ||
54 | OMAP343X_CONTROL_MEM_WKUP +\ | ||
55 | SCRATCHPAD_MEM_OFFS) | ||
51 | #define SDRC_POWER_V OMAP34XX_SDRC_REGADDR(SDRC_POWER) | 56 | #define SDRC_POWER_V OMAP34XX_SDRC_REGADDR(SDRC_POWER) |
52 | #define SDRC_SYSCONFIG_P (OMAP343X_SDRC_BASE + SDRC_SYSCONFIG) | 57 | #define SDRC_SYSCONFIG_P (OMAP343X_SDRC_BASE + SDRC_SYSCONFIG) |
53 | #define SDRC_MR_0_P (OMAP343X_SDRC_BASE + SDRC_MR_0) | 58 | #define SDRC_MR_0_P (OMAP343X_SDRC_BASE + SDRC_MR_0) |
@@ -59,48 +64,38 @@ | |||
59 | #define SDRC_DLLA_STATUS_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS) | 64 | #define SDRC_DLLA_STATUS_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS) |
60 | #define SDRC_DLLA_CTRL_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL) | 65 | #define SDRC_DLLA_CTRL_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL) |
61 | 66 | ||
62 | .text | 67 | |
63 | /* Function to acquire the semaphore in scratchpad */ | 68 | /* |
64 | ENTRY(lock_scratchpad_sem) | 69 | * API functions |
65 | stmfd sp!, {lr} @ save registers on stack | 70 | */ |
66 | wait_sem: | 71 | |
67 | mov r0,#1 | 72 | /* |
68 | ldr r1, sdrc_scratchpad_sem | 73 | * The "get_*restore_pointer" functions are used to provide a |
69 | wait_loop: | 74 | * physical restore address where the ROM code jumps while waking |
70 | ldr r2, [r1] @ load the lock value | 75 | * up from MPU OFF/OSWR state. |
71 | cmp r2, r0 @ is the lock free ? | 76 | * The restore pointer is stored into the scratchpad. |
72 | beq wait_loop @ not free... | 77 | */ |
73 | swp r2, r0, [r1] @ semaphore free so lock it and proceed | ||
74 | cmp r2, r0 @ did we succeed ? | ||
75 | beq wait_sem @ no - try again | ||
76 | ldmfd sp!, {pc} @ restore regs and return | ||
77 | sdrc_scratchpad_sem: | ||
78 | .word SDRC_SCRATCHPAD_SEM_V | ||
79 | ENTRY(lock_scratchpad_sem_sz) | ||
80 | .word . - lock_scratchpad_sem | ||
81 | |||
82 | .text | ||
83 | /* Function to release the scratchpad semaphore */ | ||
84 | ENTRY(unlock_scratchpad_sem) | ||
85 | stmfd sp!, {lr} @ save registers on stack | ||
86 | ldr r3, sdrc_scratchpad_sem | ||
87 | mov r2,#0 | ||
88 | str r2,[r3] | ||
89 | ldmfd sp!, {pc} @ restore regs and return | ||
90 | ENTRY(unlock_scratchpad_sem_sz) | ||
91 | .word . - unlock_scratchpad_sem | ||
92 | 78 | ||
93 | .text | 79 | .text |
94 | /* Function call to get the restore pointer for resume from OFF */ | 80 | /* Function call to get the restore pointer for resume from OFF */ |
95 | ENTRY(get_restore_pointer) | 81 | ENTRY(get_restore_pointer) |
96 | stmfd sp!, {lr} @ save registers on stack | 82 | stmfd sp!, {lr} @ save registers on stack |
97 | adr r0, restore | 83 | adr r0, restore |
98 | ldmfd sp!, {pc} @ restore regs and return | 84 | ldmfd sp!, {pc} @ restore regs and return |
99 | ENTRY(get_restore_pointer_sz) | 85 | ENTRY(get_restore_pointer_sz) |
100 | .word . - get_restore_pointer | 86 | .word . - get_restore_pointer |
101 | 87 | ||
102 | .text | 88 | .text |
103 | /* Function call to get the restore pointer for for ES3 to resume from OFF */ | 89 | /* Function call to get the restore pointer for 3630 resume from OFF */ |
90 | ENTRY(get_omap3630_restore_pointer) | ||
91 | stmfd sp!, {lr} @ save registers on stack | ||
92 | adr r0, restore_3630 | ||
93 | ldmfd sp!, {pc} @ restore regs and return | ||
94 | ENTRY(get_omap3630_restore_pointer_sz) | ||
95 | .word . - get_omap3630_restore_pointer | ||
96 | |||
97 | .text | ||
98 | /* Function call to get the restore pointer for ES3 to resume from OFF */ | ||
104 | ENTRY(get_es3_restore_pointer) | 99 | ENTRY(get_es3_restore_pointer) |
105 | stmfd sp!, {lr} @ save registers on stack | 100 | stmfd sp!, {lr} @ save registers on stack |
106 | adr r0, restore_es3 | 101 | adr r0, restore_es3 |
@@ -108,54 +103,23 @@ ENTRY(get_es3_restore_pointer) | |||
108 | ENTRY(get_es3_restore_pointer_sz) | 103 | ENTRY(get_es3_restore_pointer_sz) |
109 | .word . - get_es3_restore_pointer | 104 | .word . - get_es3_restore_pointer |
110 | 105 | ||
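For orientation, here is a rough C sketch of the caller-side selection these three getters imply (cf. the "get_*restore_pointer" comment block above): one of them is picked per chip revision and its result, converted to a physical address, is what the scratchpad save code (not part of this diff) publishes for the ROM. cpu_is_omap3630(), omap_rev(), OMAP3430_REV_ES3_0 and virt_to_phys() are assumed from the usual platform headers, and the revision test is schematic.

#include <linux/types.h>
#include <asm/memory.h>		/* virt_to_phys() */
#include <plat/cpu.h>		/* cpu_is_omap3630(), omap_rev() -- assumed location */

extern u32 get_restore_pointer(void);		/* common 3xxx restore */
extern u32 get_omap3630_restore_pointer(void);	/* 36xx restore */
extern u32 get_es3_restore_pointer(void);	/* 34xx >= ES3.0 restore */

static u32 pick_restore_ptr_phys(void)
{
	u32 va;

	if (cpu_is_omap3630())
		va = get_omap3630_restore_pointer();
	else if (omap_rev() >= OMAP3430_REV_ES3_0)	/* schematic revision check */
		va = get_es3_restore_pointer();
	else
		va = get_restore_pointer();

	/* The ROM needs a physical address it can branch to on wake-up from MPU OFF */
	return virt_to_phys((void *)va);
}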
111 | ENTRY(es3_sdrc_fix) | 106 | .text |
112 | ldr r4, sdrc_syscfg @ get config addr | 107 | /* |
113 | ldr r5, [r4] @ get value | 108 | * L2 cache needs to be toggled for stable OFF mode functionality on 3630. |
114 | tst r5, #0x100 @ is part access blocked | 109 | * This function sets up a flag that will allow for this toggling to take |
115 | it eq | 110 | * place on 3630. Hopefully some version in the future may not need this. |
116 | biceq r5, r5, #0x100 @ clear bit if set | 111 | */ |
117 | str r5, [r4] @ write back change | 112 | ENTRY(enable_omap3630_toggle_l2_on_restore) |
118 | ldr r4, sdrc_mr_0 @ get config addr | 113 | stmfd sp!, {lr} @ save registers on stack |
119 | ldr r5, [r4] @ get value | 114 | /* Setup so that we will disable and enable l2 */ |
120 | str r5, [r4] @ write back change | 115 | mov r1, #0x1 |
121 | ldr r4, sdrc_emr2_0 @ get config addr | 116 | str r1, l2dis_3630 |
122 | ldr r5, [r4] @ get value | 117 | ldmfd sp!, {pc} @ restore regs and return |
123 | str r5, [r4] @ write back change | ||
124 | ldr r4, sdrc_manual_0 @ get config addr | ||
125 | mov r5, #0x2 @ autorefresh command | ||
126 | str r5, [r4] @ kick off refreshes | ||
127 | ldr r4, sdrc_mr_1 @ get config addr | ||
128 | ldr r5, [r4] @ get value | ||
129 | str r5, [r4] @ write back change | ||
130 | ldr r4, sdrc_emr2_1 @ get config addr | ||
131 | ldr r5, [r4] @ get value | ||
132 | str r5, [r4] @ write back change | ||
133 | ldr r4, sdrc_manual_1 @ get config addr | ||
134 | mov r5, #0x2 @ autorefresh command | ||
135 | str r5, [r4] @ kick off refreshes | ||
136 | bx lr | ||
137 | sdrc_syscfg: | ||
138 | .word SDRC_SYSCONFIG_P | ||
139 | sdrc_mr_0: | ||
140 | .word SDRC_MR_0_P | ||
141 | sdrc_emr2_0: | ||
142 | .word SDRC_EMR2_0_P | ||
143 | sdrc_manual_0: | ||
144 | .word SDRC_MANUAL_0_P | ||
145 | sdrc_mr_1: | ||
146 | .word SDRC_MR_1_P | ||
147 | sdrc_emr2_1: | ||
148 | .word SDRC_EMR2_1_P | ||
149 | sdrc_manual_1: | ||
150 | .word SDRC_MANUAL_1_P | ||
151 | ENTRY(es3_sdrc_fix_sz) | ||
152 | .word . - es3_sdrc_fix | ||
153 | 118 | ||
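As a side note, the enable_omap3630_toggle_l2_on_restore hunk above reduces to a flag plus an ACTLR bit flip on the restore path (see skipl2dis/skipl2reen further down). The C rendering below is an illustration of that mechanism, not kernel source; it assumes ACTLR bit 1 is the L2 enable bit, exactly as the assembly treats it.

#include <linux/types.h>

static u32 l2dis_3630;		/* mirrors the l2dis_3630 word at the end of the file */

/* Called once on 3630 so that the OFF-mode restore path toggles L2. */
void enable_omap3630_toggle_l2_on_restore(void)
{
	l2dis_3630 = 1;
}

/* What the restore path does with the flag (skipl2dis / skipl2reen). */
static void toggle_l2(int enable)
{
	u32 actlr;

	if (!l2dis_3630)
		return;
	asm volatile("mrc p15, 0, %0, c1, c0, 1" : "=r" (actlr));
	if (enable)
		actlr |= (1 << 1);	/* re-enable L2 after the context restore */
	else
		actlr &= ~(1 << 1);	/* disable L2 before the L2 invalidation */
	asm volatile("mcr p15, 0, %0, c1, c0, 1" : : "r" (actlr));
}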
119 | .text | ||
154 | /* Function to call rom code to save secure ram context */ | 120 | /* Function to call rom code to save secure ram context */ |
155 | ENTRY(save_secure_ram_context) | 121 | ENTRY(save_secure_ram_context) |
156 | stmfd sp!, {r1-r12, lr} @ save registers on stack | 122 | stmfd sp!, {r1-r12, lr} @ save registers on stack |
157 | save_secure_ram_debug: | ||
158 | /* b save_secure_ram_debug */ @ enable to debug save code | ||
159 | adr r3, api_params @ r3 points to parameters | 123 | adr r3, api_params @ r3 points to parameters |
160 | str r0, [r3,#0x4] @ r0 has sdram address | 124 | str r0, [r3,#0x4] @ r0 has sdram address |
161 | ldr r12, high_mask | 125 | ldr r12, high_mask |
@@ -185,35 +149,162 @@ ENTRY(save_secure_ram_context_sz) | |||
185 | .word . - save_secure_ram_context | 149 | .word . - save_secure_ram_context |
186 | 150 | ||
187 | /* | 151 | /* |
152 | * ====================== | ||
153 | * == Idle entry point == | ||
154 | * ====================== | ||
155 | */ | ||
156 | |||
157 | /* | ||
188 | * Forces OMAP into idle state | 158 | * Forces OMAP into idle state |
189 | * | 159 | * |
190 | * omap34xx_suspend() - This bit of code just executes the WFI | 160 | * omap34xx_cpu_suspend() - This bit of code saves the CPU context if needed |
191 | * for normal idles. | 161 | * and executes the WFI instruction. Calling WFI effectively changes the |
162 | * power domain states to the desired target power states. | ||
163 | * | ||
192 | * | 164 | * |
193 | * Note: This code get's copied to internal SRAM at boot. When the OMAP | 165 | * Notes: |
194 | * wakes up it continues execution at the point it went to sleep. | 166 | * - this code gets copied to internal SRAM at boot and after wake-up |
167 | * from OFF mode. The execution pointer in SRAM is _omap_sram_idle. | ||
168 | * - when the OMAP wakes up it continues at different execution points | ||
169 | * depending on the low power mode (non-OFF vs OFF modes), | ||
170 | * cf. 'Resume path for xxx mode' comments. | ||
195 | */ | 171 | */ |
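A minimal sketch of the caller side implied by the notes above: the routine is pushed into SRAM and from then on invoked through the returned pointer. The prototype mirrors the r0/r1 comments below; omap_sram_push() is assumed from <plat/sram.h>, which this patch also starts including.

#include <linux/types.h>
#include <plat/sram.h>		/* omap_sram_push() */

extern void omap34xx_cpu_suspend(u32 *addr, int save_state);
extern unsigned int omap34xx_cpu_suspend_sz;

static void (*_omap_sram_idle)(u32 *addr, int save_state);

static void push_suspend_code_to_sram(void)
{
	/* Copy the routine into SRAM; execution always goes through this copy. */
	_omap_sram_idle = omap_sram_push(omap34xx_cpu_suspend,
					 omap34xx_cpu_suspend_sz);
}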
196 | ENTRY(omap34xx_cpu_suspend) | 172 | ENTRY(omap34xx_cpu_suspend) |
197 | stmfd sp!, {r0-r12, lr} @ save registers on stack | 173 | stmfd sp!, {r0-r12, lr} @ save registers on stack |
198 | loop: | ||
199 | /*b loop*/ @Enable to debug by stepping through code | ||
200 | /* r0 contains restore pointer in sdram */ | ||
201 | /* r1 contains information about saving context */ | ||
202 | ldr r4, sdrc_power @ read the SDRC_POWER register | ||
203 | ldr r5, [r4] @ read the contents of SDRC_POWER | ||
204 | orr r5, r5, #0x40 @ enable self refresh on idle req | ||
205 | str r5, [r4] @ write back to SDRC_POWER register | ||
206 | 174 | ||
175 | /* | ||
176 | * r0 contains restore pointer in sdram | ||
177 | * r1 contains information about saving context: | ||
178 | * 0 - No context lost | ||
179 | * 1 - Only L1 and logic lost | ||
180 | * 2 - Only L2 lost | ||
181 | * 3 - Both L1 and L2 lost | ||
182 | */ | ||
183 | |||
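Purely for orientation, the control flow that follows can be read as the sketch below. It is an illustration of the assembly, not kernel code: the helper names stand in for the labels (save_context_wfi, l1_logic_lost, clean_l2, omap3_do_wfi), and only v7_flush_dcache_all() is a real routine, reached through the kernel_flush literal further down.

#include <linux/types.h>

enum ctx_lost { CTX_NONE = 0, CTX_L1_LOGIC = 1, CTX_L2_ONLY = 2, CTX_L1_L2 = 3 };

extern void save_restore_call_params(u32 *area);	/* the two stmia parameter pushes */
extern void save_cp15_context(u32 *area);		/* l1_logic_lost: sp/spsr/lr + CP15 state */
extern void omap3_do_wfi(void);				/* self-refresh, barriers, then WFI */
extern void v7_flush_dcache_all(void);			/* clean_l2: the kernel flush routine */

static void suspend_path(u32 *sdram_area, enum ctx_lost lost)
{
	if (lost == CTX_NONE) {
		omap3_do_wfi();			/* nothing to save, go straight to idle */
		return;
	}
	save_restore_call_params(sdram_area);	/* ACTLR + L2 AUX ctrl for the ROM restore call */
	if (lost != CTX_L2_ONLY)
		save_cp15_context(sdram_area);
	if (lost != CTX_L1_LOGIC)
		v7_flush_dcache_all();		/* clean L2 only when it will really be lost */
	omap3_do_wfi();
}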
184 | /* Directly jump to WFI if the context save is not required */ | ||
207 | cmp r1, #0x0 | 185 | cmp r1, #0x0 |
208 | /* If context save is required, do that and execute wfi */ | 186 | beq omap3_do_wfi |
209 | bne save_context_wfi | 187 | |
188 | /* Otherwise fall through to the save context code */ | ||
189 | save_context_wfi: | ||
190 | mov r8, r0 @ Store SDRAM address in r8 | ||
191 | mrc p15, 0, r5, c1, c0, 1 @ Read Auxiliary Control Register | ||
192 | mov r4, #0x1 @ Number of parameters for restore call | ||
193 | stmia r8!, {r4-r5} @ Push parameters for restore call | ||
194 | mrc p15, 1, r5, c9, c0, 2 @ Read L2 AUX ctrl register | ||
195 | stmia r8!, {r4-r5} @ Push parameters for restore call | ||
196 | |||
197 | /* Check what the target sleep state is from r1 */ | ||
198 | cmp r1, #0x2 @ Only L2 lost, no need to save context | ||
199 | beq clean_caches | ||
200 | |||
201 | l1_logic_lost: | ||
202 | /* Store sp and spsr to SDRAM */ | ||
203 | mov r4, sp | ||
204 | mrs r5, spsr | ||
205 | mov r6, lr | ||
206 | stmia r8!, {r4-r6} | ||
207 | /* Save all ARM registers */ | ||
208 | /* Coprocessor access control register */ | ||
209 | mrc p15, 0, r6, c1, c0, 2 | ||
210 | stmia r8!, {r6} | ||
211 | /* TTBR0, TTBR1 and Translation table base control */ | ||
212 | mrc p15, 0, r4, c2, c0, 0 | ||
213 | mrc p15, 0, r5, c2, c0, 1 | ||
214 | mrc p15, 0, r6, c2, c0, 2 | ||
215 | stmia r8!, {r4-r6} | ||
216 | /* | ||
217 | * Domain access control register, data fault status register, | ||
218 | * and instruction fault status register | ||
219 | */ | ||
220 | mrc p15, 0, r4, c3, c0, 0 | ||
221 | mrc p15, 0, r5, c5, c0, 0 | ||
222 | mrc p15, 0, r6, c5, c0, 1 | ||
223 | stmia r8!, {r4-r6} | ||
224 | /* | ||
225 | * Data aux fault status register, instruction aux fault status, | ||
226 | * data fault address register and instruction fault address register | ||
227 | */ | ||
228 | mrc p15, 0, r4, c5, c1, 0 | ||
229 | mrc p15, 0, r5, c5, c1, 1 | ||
230 | mrc p15, 0, r6, c6, c0, 0 | ||
231 | mrc p15, 0, r7, c6, c0, 2 | ||
232 | stmia r8!, {r4-r7} | ||
233 | /* | ||
234 | * user r/w thread and process ID, user r/o thread and process ID, | ||
235 | * priv only thread and process ID, cache size selection | ||
236 | */ | ||
237 | mrc p15, 0, r4, c13, c0, 2 | ||
238 | mrc p15, 0, r5, c13, c0, 3 | ||
239 | mrc p15, 0, r6, c13, c0, 4 | ||
240 | mrc p15, 2, r7, c0, c0, 0 | ||
241 | stmia r8!, {r4-r7} | ||
242 | /* Data TLB lockdown, instruction TLB lockdown registers */ | ||
243 | mrc p15, 0, r5, c10, c0, 0 | ||
244 | mrc p15, 0, r6, c10, c0, 1 | ||
245 | stmia r8!, {r5-r6} | ||
246 | /* Secure or non secure vector base address, FCSE PID, Context PID*/ | ||
247 | mrc p15, 0, r4, c12, c0, 0 | ||
248 | mrc p15, 0, r5, c13, c0, 0 | ||
249 | mrc p15, 0, r6, c13, c0, 1 | ||
250 | stmia r8!, {r4-r6} | ||
251 | /* Primary remap, normal remap registers */ | ||
252 | mrc p15, 0, r4, c10, c2, 0 | ||
253 | mrc p15, 0, r5, c10, c2, 1 | ||
254 | stmia r8!,{r4-r5} | ||
255 | |||
256 | /* Store current cpsr*/ | ||
257 | mrs r2, cpsr | ||
258 | stmia r8!, {r2} | ||
259 | |||
260 | mrc p15, 0, r4, c1, c0, 0 | ||
261 | /* save control register */ | ||
262 | stmia r8!, {r4} | ||
263 | |||
264 | clean_caches: | ||
265 | /* | ||
266 | * Clean Data or unified cache to POU | ||
267 | * How to invalidate only L1 cache???? - #FIX_ME# | ||
268 | * mcr p15, 0, r11, c7, c11, 1 | ||
269 | */ | ||
270 | cmp r1, #0x1 @ Check whether L2 inval is required | ||
271 | beq omap3_do_wfi | ||
272 | |||
273 | clean_l2: | ||
274 | /* | ||
275 | * jump out to kernel flush routine | ||
276 | * - reusing that code is better | ||
277 | * - it executes in a cached space so is faster than refetch per-block | ||
278 | * - should be faster and will change with kernel | ||
279 | * - 'might' have to copy address, load and jump to it | ||
280 | */ | ||
281 | ldr r1, kernel_flush | ||
282 | mov lr, pc | ||
283 | bx r1 | ||
284 | |||
285 | omap3_do_wfi: | ||
286 | ldr r4, sdrc_power @ read the SDRC_POWER register | ||
287 | ldr r5, [r4] @ read the contents of SDRC_POWER | ||
288 | orr r5, r5, #0x40 @ enable self refresh on idle req | ||
289 | str r5, [r4] @ write back to SDRC_POWER register | ||
290 | |||
210 | /* Data memory barrier and Data sync barrier */ | 291 | /* Data memory barrier and Data sync barrier */ |
211 | mov r1, #0 | 292 | mov r1, #0 |
212 | mcr p15, 0, r1, c7, c10, 4 | 293 | mcr p15, 0, r1, c7, c10, 4 |
213 | mcr p15, 0, r1, c7, c10, 5 | 294 | mcr p15, 0, r1, c7, c10, 5 |
214 | 295 | ||
296 | /* | ||
297 | * =================================== | ||
298 | * == WFI instruction => Enter idle == | ||
299 | * =================================== | ||
300 | */ | ||
215 | wfi @ wait for interrupt | 301 | wfi @ wait for interrupt |
216 | 302 | ||
303 | /* | ||
304 | * =================================== | ||
305 | * == Resume path for non-OFF modes == | ||
306 | * =================================== | ||
307 | */ | ||
217 | nop | 308 | nop |
218 | nop | 309 | nop |
219 | nop | 310 | nop |
@@ -226,9 +317,30 @@ loop: | |||
226 | nop | 317 | nop |
227 | bl wait_sdrc_ok | 318 | bl wait_sdrc_ok |
228 | 319 | ||
229 | ldmfd sp!, {r0-r12, pc} @ restore regs and return | 320 | /* |
321 | * =================================== | ||
322 | * == Exit point from non-OFF modes == | ||
323 | * =================================== | ||
324 | */ | ||
325 | ldmfd sp!, {r0-r12, pc} @ restore regs and return | ||
326 | |||
327 | |||
328 | /* | ||
329 | * ============================== | ||
330 | * == Resume path for OFF mode == | ||
331 | * ============================== | ||
332 | */ | ||
333 | |||
334 | /* | ||
335 | * The restore_* functions are called by the ROM code | ||
336 | * when back from WFI in OFF mode. | ||
337 | * Cf. the get_*restore_pointer functions. | ||
338 | * | ||
339 | * restore_es3: applies to 34xx >= ES3.0 | ||
340 | * restore_3630: applies to 36xx | ||
341 | * restore: common code for 3xxx | ||
342 | */ | ||
230 | restore_es3: | 343 | restore_es3: |
231 | /*b restore_es3*/ @ Enable to debug restore code | ||
232 | ldr r5, pm_prepwstst_core_p | 344 | ldr r5, pm_prepwstst_core_p |
233 | ldr r4, [r5] | 345 | ldr r4, [r5] |
234 | and r4, r4, #0x3 | 346 | and r4, r4, #0x3 |
@@ -245,82 +357,117 @@ copy_to_sram: | |||
245 | bne copy_to_sram | 357 | bne copy_to_sram |
246 | ldr r1, sram_base | 358 | ldr r1, sram_base |
247 | blx r1 | 359 | blx r1 |
360 | b restore | ||
361 | |||
362 | restore_3630: | ||
363 | ldr r1, pm_prepwstst_core_p | ||
364 | ldr r2, [r1] | ||
365 | and r2, r2, #0x3 | ||
366 | cmp r2, #0x0 @ Check if previous power state of CORE is OFF | ||
367 | bne restore | ||
368 | /* Disable RTA before giving control */ | ||
369 | ldr r1, control_mem_rta | ||
370 | mov r2, #OMAP36XX_RTA_DISABLE | ||
371 | str r2, [r1] | ||
372 | |||
373 | /* Fall through to common code for the remaining logic */ | ||
374 | |||
248 | restore: | 375 | restore: |
249 | /* b restore*/ @ Enable to debug restore code | 376 | /* |
250 | /* Check what was the reason for mpu reset and store the reason in r9*/ | 377 | * Check what was the reason for mpu reset and store the reason in r9: |
251 | /* 1 - Only L1 and logic lost */ | 378 | * 0 - No context lost |
252 | /* 2 - Only L2 lost - In this case, we wont be here */ | 379 | * 1 - Only L1 and logic lost |
253 | /* 3 - Both L1 and L2 lost */ | 380 | * 2 - Only L2 lost - In this case, we won't be here |
254 | ldr r1, pm_pwstctrl_mpu | 381 | * 3 - Both L1 and L2 lost |
382 | */ | ||
383 | ldr r1, pm_pwstctrl_mpu | ||
255 | ldr r2, [r1] | 384 | ldr r2, [r1] |
256 | and r2, r2, #0x3 | 385 | and r2, r2, #0x3 |
257 | cmp r2, #0x0 @ Check if target power state was OFF or RET | 386 | cmp r2, #0x0 @ Check if target power state was OFF or RET |
258 | moveq r9, #0x3 @ MPU OFF => L1 and L2 lost | 387 | moveq r9, #0x3 @ MPU OFF => L1 and L2 lost |
259 | movne r9, #0x1 @ Only L1 and logic lost => avoid L2 invalidation | 388 | movne r9, #0x1 @ Only L1 and logic lost => avoid L2 invalidation |
260 | bne logic_l1_restore | 389 | bne logic_l1_restore |
390 | |||
391 | ldr r0, l2dis_3630 | ||
392 | cmp r0, #0x1 @ should we disable L2 on 3630? | ||
393 | bne skipl2dis | ||
394 | mrc p15, 0, r0, c1, c0, 1 | ||
395 | bic r0, r0, #2 @ disable L2 cache | ||
396 | mcr p15, 0, r0, c1, c0, 1 | ||
397 | skipl2dis: | ||
261 | ldr r0, control_stat | 398 | ldr r0, control_stat |
262 | ldr r1, [r0] | 399 | ldr r1, [r0] |
263 | and r1, #0x700 | 400 | and r1, #0x700 |
264 | cmp r1, #0x300 | 401 | cmp r1, #0x300 |
265 | beq l2_inv_gp | 402 | beq l2_inv_gp |
266 | mov r0, #40 @ set service ID for PPA | 403 | mov r0, #40 @ set service ID for PPA |
267 | mov r12, r0 @ copy secure Service ID in r12 | 404 | mov r12, r0 @ copy secure Service ID in r12 |
268 | mov r1, #0 @ set task id for ROM code in r1 | 405 | mov r1, #0 @ set task id for ROM code in r1 |
269 | mov r2, #4 @ set some flags in r2, r6 | 406 | mov r2, #4 @ set some flags in r2, r6 |
270 | mov r6, #0xff | 407 | mov r6, #0xff |
271 | adr r3, l2_inv_api_params @ r3 points to dummy parameters | 408 | adr r3, l2_inv_api_params @ r3 points to dummy parameters |
272 | mcr p15, 0, r0, c7, c10, 4 @ data write barrier | 409 | mcr p15, 0, r0, c7, c10, 4 @ data write barrier |
273 | mcr p15, 0, r0, c7, c10, 5 @ data memory barrier | 410 | mcr p15, 0, r0, c7, c10, 5 @ data memory barrier |
274 | .word 0xE1600071 @ call SMI monitor (smi #1) | 411 | .word 0xE1600071 @ call SMI monitor (smi #1) |
275 | /* Write to Aux control register to set some bits */ | 412 | /* Write to Aux control register to set some bits */ |
276 | mov r0, #42 @ set service ID for PPA | 413 | mov r0, #42 @ set service ID for PPA |
277 | mov r12, r0 @ copy secure Service ID in r12 | 414 | mov r12, r0 @ copy secure Service ID in r12 |
278 | mov r1, #0 @ set task id for ROM code in r1 | 415 | mov r1, #0 @ set task id for ROM code in r1 |
279 | mov r2, #4 @ set some flags in r2, r6 | 416 | mov r2, #4 @ set some flags in r2, r6 |
280 | mov r6, #0xff | 417 | mov r6, #0xff |
281 | ldr r4, scratchpad_base | 418 | ldr r4, scratchpad_base |
282 | ldr r3, [r4, #0xBC] @ r3 points to parameters | 419 | ldr r3, [r4, #0xBC] @ r3 points to parameters |
283 | mcr p15, 0, r0, c7, c10, 4 @ data write barrier | 420 | mcr p15, 0, r0, c7, c10, 4 @ data write barrier |
284 | mcr p15, 0, r0, c7, c10, 5 @ data memory barrier | 421 | mcr p15, 0, r0, c7, c10, 5 @ data memory barrier |
285 | .word 0xE1600071 @ call SMI monitor (smi #1) | 422 | .word 0xE1600071 @ call SMI monitor (smi #1) |
286 | 423 | ||
287 | #ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE | 424 | #ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE |
288 | /* Restore L2 aux control register */ | 425 | /* Restore L2 aux control register */ |
289 | @ set service ID for PPA | 426 | @ set service ID for PPA |
290 | mov r0, #CONFIG_OMAP3_L2_AUX_SECURE_SERVICE_SET_ID | 427 | mov r0, #CONFIG_OMAP3_L2_AUX_SECURE_SERVICE_SET_ID |
291 | mov r12, r0 @ copy service ID in r12 | 428 | mov r12, r0 @ copy service ID in r12 |
292 | mov r1, #0 @ set task ID for ROM code in r1 | 429 | mov r1, #0 @ set task ID for ROM code in r1 |
293 | mov r2, #4 @ set some flags in r2, r6 | 430 | mov r2, #4 @ set some flags in r2, r6 |
294 | mov r6, #0xff | 431 | mov r6, #0xff |
295 | ldr r4, scratchpad_base | 432 | ldr r4, scratchpad_base |
296 | ldr r3, [r4, #0xBC] | 433 | ldr r3, [r4, #0xBC] |
297 | adds r3, r3, #8 @ r3 points to parameters | 434 | adds r3, r3, #8 @ r3 points to parameters |
298 | mcr p15, 0, r0, c7, c10, 4 @ data write barrier | 435 | mcr p15, 0, r0, c7, c10, 4 @ data write barrier |
299 | mcr p15, 0, r0, c7, c10, 5 @ data memory barrier | 436 | mcr p15, 0, r0, c7, c10, 5 @ data memory barrier |
300 | .word 0xE1600071 @ call SMI monitor (smi #1) | 437 | .word 0xE1600071 @ call SMI monitor (smi #1) |
301 | #endif | 438 | #endif |
302 | b logic_l1_restore | 439 | b logic_l1_restore |
440 | |||
303 | l2_inv_api_params: | 441 | l2_inv_api_params: |
304 | .word 0x1, 0x00 | 442 | .word 0x1, 0x00 |
305 | l2_inv_gp: | 443 | l2_inv_gp: |
306 | /* Execute smi to invalidate L2 cache */ | 444 | /* Execute smi to invalidate L2 cache */ |
307 | mov r12, #0x1 @ set up to invalide L2 | 445 | mov r12, #0x1 @ set up to invalidate L2 |
308 | smi: .word 0xE1600070 @ Call SMI monitor (smieq) | 446 | .word 0xE1600070 @ Call SMI monitor (smieq) |
309 | /* Write to Aux control register to set some bits */ | 447 | /* Write to Aux control register to set some bits */ |
310 | ldr r4, scratchpad_base | 448 | ldr r4, scratchpad_base |
311 | ldr r3, [r4,#0xBC] | 449 | ldr r3, [r4,#0xBC] |
312 | ldr r0, [r3,#4] | 450 | ldr r0, [r3,#4] |
313 | mov r12, #0x3 | 451 | mov r12, #0x3 |
314 | .word 0xE1600070 @ Call SMI monitor (smieq) | 452 | .word 0xE1600070 @ Call SMI monitor (smieq) |
315 | ldr r4, scratchpad_base | 453 | ldr r4, scratchpad_base |
316 | ldr r3, [r4,#0xBC] | 454 | ldr r3, [r4,#0xBC] |
317 | ldr r0, [r3,#12] | 455 | ldr r0, [r3,#12] |
318 | mov r12, #0x2 | 456 | mov r12, #0x2 |
319 | .word 0xE1600070 @ Call SMI monitor (smieq) | 457 | .word 0xE1600070 @ Call SMI monitor (smieq) |
320 | logic_l1_restore: | 458 | logic_l1_restore: |
459 | ldr r1, l2dis_3630 | ||
460 | cmp r1, #0x1 @ Test if L2 re-enable needed on 3630 | ||
461 | bne skipl2reen | ||
462 | mrc p15, 0, r1, c1, c0, 1 | ||
463 | orr r1, r1, #2 @ re-enable L2 cache | ||
464 | mcr p15, 0, r1, c1, c0, 1 | ||
465 | skipl2reen: | ||
321 | mov r1, #0 | 466 | mov r1, #0 |
322 | /* Invalidate all instruction caches to PoU | 467 | /* |
323 | * and flush branch target cache */ | 468 | * Invalidate all instruction caches to PoU |
469 | * and flush branch target cache | ||
470 | */ | ||
324 | mcr p15, 0, r1, c7, c5, 0 | 471 | mcr p15, 0, r1, c7, c5, 0 |
325 | 472 | ||
326 | ldr r4, scratchpad_base | 473 | ldr r4, scratchpad_base |
@@ -341,33 +488,33 @@ logic_l1_restore: | |||
341 | MCR p15, 0, r6, c2, c0, 1 | 488 | MCR p15, 0, r6, c2, c0, 1 |
342 | /* Translation table base control register */ | 489 | /* Translation table base control register */ |
343 | MCR p15, 0, r7, c2, c0, 2 | 490 | MCR p15, 0, r7, c2, c0, 2 |
344 | /*domain access Control Register */ | 491 | /* Domain access Control Register */ |
345 | MCR p15, 0, r8, c3, c0, 0 | 492 | MCR p15, 0, r8, c3, c0, 0 |
346 | /* data fault status Register */ | 493 | /* Data fault status Register */ |
347 | MCR p15, 0, r9, c5, c0, 0 | 494 | MCR p15, 0, r9, c5, c0, 0 |
348 | 495 | ||
349 | ldmia r3!,{r4-r8} | 496 | ldmia r3!,{r4-r8} |
350 | /* instruction fault status Register */ | 497 | /* Instruction fault status Register */ |
351 | MCR p15, 0, r4, c5, c0, 1 | 498 | MCR p15, 0, r4, c5, c0, 1 |
352 | /*Data Auxiliary Fault Status Register */ | 499 | /* Data Auxiliary Fault Status Register */ |
353 | MCR p15, 0, r5, c5, c1, 0 | 500 | MCR p15, 0, r5, c5, c1, 0 |
354 | /*Instruction Auxiliary Fault Status Register*/ | 501 | /* Instruction Auxiliary Fault Status Register*/ |
355 | MCR p15, 0, r6, c5, c1, 1 | 502 | MCR p15, 0, r6, c5, c1, 1 |
356 | /*Data Fault Address Register */ | 503 | /* Data Fault Address Register */ |
357 | MCR p15, 0, r7, c6, c0, 0 | 504 | MCR p15, 0, r7, c6, c0, 0 |
358 | /*Instruction Fault Address Register*/ | 505 | /* Instruction Fault Address Register*/ |
359 | MCR p15, 0, r8, c6, c0, 2 | 506 | MCR p15, 0, r8, c6, c0, 2 |
360 | ldmia r3!,{r4-r7} | 507 | ldmia r3!,{r4-r7} |
361 | 508 | ||
362 | /* user r/w thread and process ID */ | 509 | /* User r/w thread and process ID */ |
363 | MCR p15, 0, r4, c13, c0, 2 | 510 | MCR p15, 0, r4, c13, c0, 2 |
364 | /* user ro thread and process ID */ | 511 | /* User ro thread and process ID */ |
365 | MCR p15, 0, r5, c13, c0, 3 | 512 | MCR p15, 0, r5, c13, c0, 3 |
366 | /*Privileged only thread and process ID */ | 513 | /* Privileged only thread and process ID */ |
367 | MCR p15, 0, r6, c13, c0, 4 | 514 | MCR p15, 0, r6, c13, c0, 4 |
368 | /* cache size selection */ | 515 | /* Cache size selection */ |
369 | MCR p15, 2, r7, c0, c0, 0 | 516 | MCR p15, 2, r7, c0, c0, 0 |
370 | ldmia r3!,{r4-r8} | 517 | ldmia r3!,{r4-r8} |
371 | /* Data TLB lockdown registers */ | 518 | /* Data TLB lockdown registers */ |
372 | MCR p15, 0, r4, c10, c0, 0 | 519 | MCR p15, 0, r4, c10, c0, 0 |
373 | /* Instruction TLB lockdown registers */ | 520 | /* Instruction TLB lockdown registers */ |
@@ -379,26 +526,27 @@ logic_l1_restore: | |||
379 | /* Context PID */ | 526 | /* Context PID */ |
380 | MCR p15, 0, r8, c13, c0, 1 | 527 | MCR p15, 0, r8, c13, c0, 1 |
381 | 528 | ||
382 | ldmia r3!,{r4-r5} | 529 | ldmia r3!,{r4-r5} |
383 | /* primary memory remap register */ | 530 | /* Primary memory remap register */ |
384 | MCR p15, 0, r4, c10, c2, 0 | 531 | MCR p15, 0, r4, c10, c2, 0 |
385 | /*normal memory remap register */ | 532 | /* Normal memory remap register */ |
386 | MCR p15, 0, r5, c10, c2, 1 | 533 | MCR p15, 0, r5, c10, c2, 1 |
387 | 534 | ||
388 | /* Restore cpsr */ | 535 | /* Restore cpsr */ |
389 | ldmia r3!,{r4} /*load CPSR from SDRAM*/ | 536 | ldmia r3!,{r4} @ load CPSR from SDRAM |
390 | msr cpsr, r4 /*store cpsr */ | 537 | msr cpsr, r4 @ store cpsr |
391 | 538 | ||
392 | /* Enabling MMU here */ | 539 | /* Enabling MMU here */ |
393 | mrc p15, 0, r7, c2, c0, 2 /* Read TTBRControl */ | 540 | mrc p15, 0, r7, c2, c0, 2 @ Read TTBRControl |
394 | /* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1*/ | 541 | /* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1 */ |
395 | and r7, #0x7 | 542 | and r7, #0x7 |
396 | cmp r7, #0x0 | 543 | cmp r7, #0x0 |
397 | beq usettbr0 | 544 | beq usettbr0 |
398 | ttbr_error: | 545 | ttbr_error: |
399 | /* More work needs to be done to support N[0:2] value other than 0 | 546 | /* |
400 | * So looping here so that the error can be detected | 547 | * More work needs to be done to support N[0:2] value other than 0 |
401 | */ | 548 | * So looping here so that the error can be detected |
549 | */ | ||
402 | b ttbr_error | 550 | b ttbr_error |
403 | usettbr0: | 551 | usettbr0: |
404 | mrc p15, 0, r2, c2, c0, 0 | 552 | mrc p15, 0, r2, c2, c0, 0 |
@@ -406,21 +554,25 @@ usettbr0: | |||
406 | and r2, r5 | 554 | and r2, r5 |
407 | mov r4, pc | 555 | mov r4, pc |
408 | ldr r5, table_index_mask | 556 | ldr r5, table_index_mask |
409 | and r4, r5 /* r4 = 31 to 20 bits of pc */ | 557 | and r4, r5 @ r4 = 31 to 20 bits of pc |
410 | /* Extract the value to be written to table entry */ | 558 | /* Extract the value to be written to table entry */ |
411 | ldr r1, table_entry | 559 | ldr r1, table_entry |
412 | add r1, r1, r4 /* r1 has value to be written to table entry*/ | 560 | /* r1 has the value to be written to table entry*/ |
561 | add r1, r1, r4 | ||
413 | /* Getting the address of table entry to modify */ | 562 | /* Getting the address of table entry to modify */ |
414 | lsr r4, #18 | 563 | lsr r4, #18 |
415 | add r2, r4 /* r2 has the location which needs to be modified */ | 564 | /* r2 has the location which needs to be modified */ |
565 | add r2, r4 | ||
416 | /* Storing previous entry of location being modified */ | 566 | /* Storing previous entry of location being modified */ |
417 | ldr r5, scratchpad_base | 567 | ldr r5, scratchpad_base |
418 | ldr r4, [r2] | 568 | ldr r4, [r2] |
419 | str r4, [r5, #0xC0] | 569 | str r4, [r5, #0xC0] |
420 | /* Modify the table entry */ | 570 | /* Modify the table entry */ |
421 | str r1, [r2] | 571 | str r1, [r2] |
422 | /* Storing address of entry being modified | 572 | /* |
423 | * - will be restored after enabling MMU */ | 573 | * Storing address of entry being modified |
574 | * - will be restored after enabling MMU | ||
575 | */ | ||
424 | ldr r5, scratchpad_base | 576 | ldr r5, scratchpad_base |
425 | str r2, [r5, #0xC4] | 577 | str r2, [r5, #0xC4] |
426 | 578 | ||
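The usettbr0 sequence above is easier to follow as a rough C rendering: the 1MB section containing the current PC is mapped flat (VA == PA) so execution survives the instant the MMU is switched back on, and both the old descriptor and its address are parked in the scratchpad (+0xC0/+0xC4) to be undone afterwards. read_ttbr0() and current_pc() are placeholders for the mrc/mov instructions, and section_flags stands for the table_entry literal, whose value lies outside this hunk.

#include <linux/types.h>

extern u32 read_ttbr0(void);	/* placeholder for "mrc p15, 0, rX, c2, c0, 0" */
extern u32 current_pc(void);	/* placeholder for "mov rX, pc" */

static u32 *identity_map_current_section(u32 section_flags, u32 *saved_entry)
{
	u32 ttb   = read_ttbr0() & 0xFFFFC000;	/* ttbrbit_mask: 16KB-aligned table base */
	u32 pa    = current_pc() & 0xFFF00000;	/* table_index_mask: PC bits 31:20 */
	u32 *slot = (u32 *)(ttb + (pa >> 18));	/* one 4-byte descriptor per 1MB section */

	*saved_entry = *slot;			/* previous entry, parked at scratchpad +0xC0 */
	*slot = pa + section_flags;		/* flat-mapped section descriptor */
	return slot;				/* its address is parked at scratchpad +0xC4 */
}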
@@ -429,8 +581,11 @@ usettbr0: | |||
429 | mcr p15, 0, r0, c7, c5, 6 @ Invalidate branch predictor array | 581 | mcr p15, 0, r0, c7, c5, 6 @ Invalidate branch predictor array |
430 | mcr p15, 0, r0, c8, c5, 0 @ Invalidate instruction TLB | 582 | mcr p15, 0, r0, c8, c5, 0 @ Invalidate instruction TLB |
431 | mcr p15, 0, r0, c8, c6, 0 @ Invalidate data TLB | 583 | mcr p15, 0, r0, c8, c6, 0 @ Invalidate data TLB |
432 | /* Restore control register but dont enable caches here*/ | 584 | /* |
433 | /* Caches will be enabled after restoring MMU table entry */ | 585 | * Restore control register. This enables the MMU. |
586 | * The caches and prediction are not enabled here, they | ||
587 | * will be enabled after restoring the MMU table entry. | ||
588 | */ | ||
434 | ldmia r3!, {r4} | 589 | ldmia r3!, {r4} |
435 | /* Store previous value of control register in scratchpad */ | 590 | /* Store previous value of control register in scratchpad */ |
436 | str r4, [r5, #0xC8] | 591 | str r4, [r5, #0xC8] |
@@ -438,212 +593,144 @@ usettbr0: | |||
438 | and r4, r2 | 593 | and r4, r2 |
439 | mcr p15, 0, r4, c1, c0, 0 | 594 | mcr p15, 0, r4, c1, c0, 0 |
440 | 595 | ||
441 | ldmfd sp!, {r0-r12, pc} @ restore regs and return | 596 | /* |
442 | save_context_wfi: | 597 | * ============================== |
443 | /*b save_context_wfi*/ @ enable to debug save code | 598 | * == Exit point from OFF mode == |
444 | mov r8, r0 /* Store SDRAM address in r8 */ | 599 | * ============================== |
445 | mrc p15, 0, r5, c1, c0, 1 @ Read Auxiliary Control Register | 600 | */ |
446 | mov r4, #0x1 @ Number of parameters for restore call | 601 | ldmfd sp!, {r0-r12, pc} @ restore regs and return |
447 | stmia r8!, {r4-r5} @ Push parameters for restore call | ||
448 | mrc p15, 1, r5, c9, c0, 2 @ Read L2 AUX ctrl register | ||
449 | stmia r8!, {r4-r5} @ Push parameters for restore call | ||
450 | /* Check what that target sleep state is:stored in r1*/ | ||
451 | /* 1 - Only L1 and logic lost */ | ||
452 | /* 2 - Only L2 lost */ | ||
453 | /* 3 - Both L1 and L2 lost */ | ||
454 | cmp r1, #0x2 /* Only L2 lost */ | ||
455 | beq clean_l2 | ||
456 | cmp r1, #0x1 /* L2 retained */ | ||
457 | /* r9 stores whether to clean L2 or not*/ | ||
458 | moveq r9, #0x0 /* Dont Clean L2 */ | ||
459 | movne r9, #0x1 /* Clean L2 */ | ||
460 | l1_logic_lost: | ||
461 | /* Store sp and spsr to SDRAM */ | ||
462 | mov r4, sp | ||
463 | mrs r5, spsr | ||
464 | mov r6, lr | ||
465 | stmia r8!, {r4-r6} | ||
466 | /* Save all ARM registers */ | ||
467 | /* Coprocessor access control register */ | ||
468 | mrc p15, 0, r6, c1, c0, 2 | ||
469 | stmia r8!, {r6} | ||
470 | /* TTBR0, TTBR1 and Translation table base control */ | ||
471 | mrc p15, 0, r4, c2, c0, 0 | ||
472 | mrc p15, 0, r5, c2, c0, 1 | ||
473 | mrc p15, 0, r6, c2, c0, 2 | ||
474 | stmia r8!, {r4-r6} | ||
475 | /* Domain access control register, data fault status register, | ||
476 | and instruction fault status register */ | ||
477 | mrc p15, 0, r4, c3, c0, 0 | ||
478 | mrc p15, 0, r5, c5, c0, 0 | ||
479 | mrc p15, 0, r6, c5, c0, 1 | ||
480 | stmia r8!, {r4-r6} | ||
481 | /* Data aux fault status register, instruction aux fault status, | ||
482 | datat fault address register and instruction fault address register*/ | ||
483 | mrc p15, 0, r4, c5, c1, 0 | ||
484 | mrc p15, 0, r5, c5, c1, 1 | ||
485 | mrc p15, 0, r6, c6, c0, 0 | ||
486 | mrc p15, 0, r7, c6, c0, 2 | ||
487 | stmia r8!, {r4-r7} | ||
488 | /* user r/w thread and process ID, user r/o thread and process ID, | ||
489 | priv only thread and process ID, cache size selection */ | ||
490 | mrc p15, 0, r4, c13, c0, 2 | ||
491 | mrc p15, 0, r5, c13, c0, 3 | ||
492 | mrc p15, 0, r6, c13, c0, 4 | ||
493 | mrc p15, 2, r7, c0, c0, 0 | ||
494 | stmia r8!, {r4-r7} | ||
495 | /* Data TLB lockdown, instruction TLB lockdown registers */ | ||
496 | mrc p15, 0, r5, c10, c0, 0 | ||
497 | mrc p15, 0, r6, c10, c0, 1 | ||
498 | stmia r8!, {r5-r6} | ||
499 | /* Secure or non secure vector base address, FCSE PID, Context PID*/ | ||
500 | mrc p15, 0, r4, c12, c0, 0 | ||
501 | mrc p15, 0, r5, c13, c0, 0 | ||
502 | mrc p15, 0, r6, c13, c0, 1 | ||
503 | stmia r8!, {r4-r6} | ||
504 | /* Primary remap, normal remap registers */ | ||
505 | mrc p15, 0, r4, c10, c2, 0 | ||
506 | mrc p15, 0, r5, c10, c2, 1 | ||
507 | stmia r8!,{r4-r5} | ||
508 | 602 | ||
509 | /* Store current cpsr*/ | ||
510 | mrs r2, cpsr | ||
511 | stmia r8!, {r2} | ||
512 | 603 | ||
513 | mrc p15, 0, r4, c1, c0, 0 | 604 | /* |
514 | /* save control register */ | 605 | * Internal functions |
515 | stmia r8!, {r4} | 606 | */ |
516 | clean_caches: | ||
517 | /* Clean Data or unified cache to POU*/ | ||
518 | /* How to invalidate only L1 cache???? - #FIX_ME# */ | ||
519 | /* mcr p15, 0, r11, c7, c11, 1 */ | ||
520 | cmp r9, #1 /* Check whether L2 inval is required or not*/ | ||
521 | bne skip_l2_inval | ||
522 | clean_l2: | ||
523 | /* read clidr */ | ||
524 | mrc p15, 1, r0, c0, c0, 1 | ||
525 | /* extract loc from clidr */ | ||
526 | ands r3, r0, #0x7000000 | ||
527 | /* left align loc bit field */ | ||
528 | mov r3, r3, lsr #23 | ||
529 | /* if loc is 0, then no need to clean */ | ||
530 | beq finished | ||
531 | /* start clean at cache level 0 */ | ||
532 | mov r10, #0 | ||
533 | loop1: | ||
534 | /* work out 3x current cache level */ | ||
535 | add r2, r10, r10, lsr #1 | ||
536 | /* extract cache type bits from clidr*/ | ||
537 | mov r1, r0, lsr r2 | ||
538 | /* mask of the bits for current cache only */ | ||
539 | and r1, r1, #7 | ||
540 | /* see what cache we have at this level */ | ||
541 | cmp r1, #2 | ||
542 | /* skip if no cache, or just i-cache */ | ||
543 | blt skip | ||
544 | /* select current cache level in cssr */ | ||
545 | mcr p15, 2, r10, c0, c0, 0 | ||
546 | /* isb to sych the new cssr&csidr */ | ||
547 | isb | ||
548 | /* read the new csidr */ | ||
549 | mrc p15, 1, r1, c0, c0, 0 | ||
550 | /* extract the length of the cache lines */ | ||
551 | and r2, r1, #7 | ||
552 | /* add 4 (line length offset) */ | ||
553 | add r2, r2, #4 | ||
554 | ldr r4, assoc_mask | ||
555 | /* find maximum number on the way size */ | ||
556 | ands r4, r4, r1, lsr #3 | ||
557 | /* find bit position of way size increment */ | ||
558 | clz r5, r4 | ||
559 | ldr r7, numset_mask | ||
560 | /* extract max number of the index size*/ | ||
561 | ands r7, r7, r1, lsr #13 | ||
562 | loop2: | ||
563 | mov r9, r4 | ||
564 | /* create working copy of max way size*/ | ||
565 | loop3: | ||
566 | /* factor way and cache number into r11 */ | ||
567 | orr r11, r10, r9, lsl r5 | ||
568 | /* factor index number into r11 */ | ||
569 | orr r11, r11, r7, lsl r2 | ||
570 | /*clean & invalidate by set/way */ | ||
571 | mcr p15, 0, r11, c7, c10, 2 | ||
572 | /* decrement the way*/ | ||
573 | subs r9, r9, #1 | ||
574 | bge loop3 | ||
575 | /*decrement the index */ | ||
576 | subs r7, r7, #1 | ||
577 | bge loop2 | ||
578 | skip: | ||
579 | add r10, r10, #2 | ||
580 | /* increment cache number */ | ||
581 | cmp r3, r10 | ||
582 | bgt loop1 | ||
583 | finished: | ||
584 | /*swith back to cache level 0 */ | ||
585 | mov r10, #0 | ||
586 | /* select current cache level in cssr */ | ||
587 | mcr p15, 2, r10, c0, c0, 0 | ||
588 | isb | ||
589 | skip_l2_inval: | ||
590 | /* Data memory barrier and Data sync barrier */ | ||
591 | mov r1, #0 | ||
592 | mcr p15, 0, r1, c7, c10, 4 | ||
593 | mcr p15, 0, r1, c7, c10, 5 | ||
594 | 607 | ||
595 | wfi @ wait for interrupt | 608 | /* This function implements the erratum ID i443 WA, applies to 34xx >= ES3.0 */ |
596 | nop | 609 | .text |
597 | nop | 610 | ENTRY(es3_sdrc_fix) |
598 | nop | 611 | ldr r4, sdrc_syscfg @ get config addr |
599 | nop | 612 | ldr r5, [r4] @ get value |
600 | nop | 613 | tst r5, #0x100 @ is part access blocked |
601 | nop | 614 | it eq |
602 | nop | 615 | biceq r5, r5, #0x100 @ clear bit if set |
603 | nop | 616 | str r5, [r4] @ write back change |
604 | nop | 617 | ldr r4, sdrc_mr_0 @ get config addr |
605 | nop | 618 | ldr r5, [r4] @ get value |
606 | bl wait_sdrc_ok | 619 | str r5, [r4] @ write back change |
607 | /* restore regs and return */ | 620 | ldr r4, sdrc_emr2_0 @ get config addr |
608 | ldmfd sp!, {r0-r12, pc} | 621 | ldr r5, [r4] @ get value |
622 | str r5, [r4] @ write back change | ||
623 | ldr r4, sdrc_manual_0 @ get config addr | ||
624 | mov r5, #0x2 @ autorefresh command | ||
625 | str r5, [r4] @ kick off refreshes | ||
626 | ldr r4, sdrc_mr_1 @ get config addr | ||
627 | ldr r5, [r4] @ get value | ||
628 | str r5, [r4] @ write back change | ||
629 | ldr r4, sdrc_emr2_1 @ get config addr | ||
630 | ldr r5, [r4] @ get value | ||
631 | str r5, [r4] @ write back change | ||
632 | ldr r4, sdrc_manual_1 @ get config addr | ||
633 | mov r5, #0x2 @ autorefresh command | ||
634 | str r5, [r4] @ kick off refreshes | ||
635 | bx lr | ||
636 | |||
637 | sdrc_syscfg: | ||
638 | .word SDRC_SYSCONFIG_P | ||
639 | sdrc_mr_0: | ||
640 | .word SDRC_MR_0_P | ||
641 | sdrc_emr2_0: | ||
642 | .word SDRC_EMR2_0_P | ||
643 | sdrc_manual_0: | ||
644 | .word SDRC_MANUAL_0_P | ||
645 | sdrc_mr_1: | ||
646 | .word SDRC_MR_1_P | ||
647 | sdrc_emr2_1: | ||
648 | .word SDRC_EMR2_1_P | ||
649 | sdrc_manual_1: | ||
650 | .word SDRC_MANUAL_1_P | ||
651 | ENTRY(es3_sdrc_fix_sz) | ||
652 | .word . - es3_sdrc_fix | ||
653 | |||
654 | /* | ||
655 | * This function implements the erratum ID i581 WA: | ||
656 | * SDRC state restore before accessing the SDRAM | ||
657 | * | ||
658 | * Only used at return from non-OFF mode. For OFF | ||
659 | * mode the ROM code configures the SDRC and | ||
660 | * the DPLL before calling the restore code directly | ||
661 | * from DDR. | ||
662 | */ | ||
609 | 663 | ||
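A rough C rendering of the wait_sdrc_ok sequence that follows; the __iomem pointers stand for the *_V literals at the end of the file and the logic simply mirrors the assembly, including the two retry counters.

#include <linux/types.h>
#include <linux/io.h>

/* Virtual addresses corresponding to the *_V literals below. */
extern void __iomem *cm_idlest_ckgen, *cm_idlest1_core;
extern void __iomem *sdrc_power, *sdrc_dlla_ctrl, *sdrc_dlla_status;

static u32 kick_counter, wait_dll_lock_counter;	/* mirror the two counters kept in SRAM */

static void wait_sdrc_ok_sketch(void)
{
	u32 ctrl;
	int tries;

	while (!(__raw_readl(cm_idlest_ckgen) & 1))	/* wait for DPLL3 to lock */
		;
	while (__raw_readl(cm_idlest1_core) & 2)	/* wait for the SDRC to become accessible */
		;
	/* allow DLL powerdown upon hw idle req again */
	__raw_writel(__raw_readl(sdrc_power) & ~0x40, sdrc_power);

	if (__raw_readl(sdrc_dlla_ctrl) & 0x4)		/* DLL already in lock mode */
		return;
	for (;;) {
		wait_dll_lock_counter++;
		for (tries = 8; tries; tries--)		/* ~20 us window for the DLL to lock */
			if (__raw_readl(sdrc_dlla_status) & 0x4)
				return;
		/* not locked: pulse the DLL enable bit (kick_dll) and retry */
		ctrl = __raw_readl(sdrc_dlla_ctrl);
		__raw_writel(ctrl & ~(1 << 3), sdrc_dlla_ctrl);	/* disable DLL */
		__raw_writel(ctrl | (1 << 3), sdrc_dlla_ctrl);	/* re-enable DLL */
		kick_counter++;
	}
}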
610 | /* Make sure SDRC accesses are ok */ | 664 | /* Make sure SDRC accesses are ok */ |
611 | wait_sdrc_ok: | 665 | wait_sdrc_ok: |
612 | ldr r4, cm_idlest1_core | 666 | |
613 | ldr r5, [r4] | 667 | /* DPLL3 must be locked before accessing the SDRC. Maybe the HW ensures this */ |
614 | and r5, r5, #0x2 | 668 | ldr r4, cm_idlest_ckgen |
615 | cmp r5, #0 | 669 | wait_dpll3_lock: |
616 | bne wait_sdrc_ok | 670 | ldr r5, [r4] |
617 | ldr r4, sdrc_power | 671 | tst r5, #1 |
618 | ldr r5, [r4] | 672 | beq wait_dpll3_lock |
619 | bic r5, r5, #0x40 | 673 | |
620 | str r5, [r4] | 674 | ldr r4, cm_idlest1_core |
675 | wait_sdrc_ready: | ||
676 | ldr r5, [r4] | ||
677 | tst r5, #0x2 | ||
678 | bne wait_sdrc_ready | ||
679 | /* allow DLL powerdown upon hw idle req */ | ||
680 | ldr r4, sdrc_power | ||
681 | ldr r5, [r4] | ||
682 | bic r5, r5, #0x40 | ||
683 | str r5, [r4] | ||
684 | |||
685 | is_dll_in_lock_mode: | ||
686 | /* Is dll in lock mode? */ | ||
687 | ldr r4, sdrc_dlla_ctrl | ||
688 | ldr r5, [r4] | ||
689 | tst r5, #0x4 | ||
690 | bxne lr @ Return if locked | ||
691 | /* wait till dll locks */ | ||
692 | wait_dll_lock_timed: | ||
693 | ldr r4, wait_dll_lock_counter | ||
694 | add r4, r4, #1 | ||
695 | str r4, wait_dll_lock_counter | ||
696 | ldr r4, sdrc_dlla_status | ||
697 | /* Wait 20uS for lock */ | ||
698 | mov r6, #8 | ||
621 | wait_dll_lock: | 699 | wait_dll_lock: |
622 | /* Is dll in lock mode? */ | 700 | subs r6, r6, #0x1 |
623 | ldr r4, sdrc_dlla_ctrl | 701 | beq kick_dll |
624 | ldr r5, [r4] | 702 | ldr r5, [r4] |
625 | tst r5, #0x4 | 703 | and r5, r5, #0x4 |
626 | bxne lr | 704 | cmp r5, #0x4 |
627 | /* wait till dll locks */ | 705 | bne wait_dll_lock |
628 | ldr r4, sdrc_dlla_status | 706 | bx lr @ Return when locked |
629 | ldr r5, [r4] | 707 | |
630 | and r5, r5, #0x4 | 708 | /* disable/reenable DLL if not locked */ |
631 | cmp r5, #0x4 | 709 | kick_dll: |
632 | bne wait_dll_lock | 710 | ldr r4, sdrc_dlla_ctrl |
633 | bx lr | 711 | ldr r5, [r4] |
712 | mov r6, r5 | ||
713 | bic r6, #(1<<3) @ disable dll | ||
714 | str r6, [r4] | ||
715 | dsb | ||
716 | orr r6, r6, #(1<<3) @ enable dll | ||
717 | str r6, [r4] | ||
718 | dsb | ||
719 | ldr r4, kick_counter | ||
720 | add r4, r4, #1 | ||
721 | str r4, kick_counter | ||
722 | b wait_dll_lock_timed | ||
634 | 723 | ||
635 | cm_idlest1_core: | 724 | cm_idlest1_core: |
636 | .word CM_IDLEST1_CORE_V | 725 | .word CM_IDLEST1_CORE_V |
726 | cm_idlest_ckgen: | ||
727 | .word CM_IDLEST_CKGEN_V | ||
637 | sdrc_dlla_status: | 728 | sdrc_dlla_status: |
638 | .word SDRC_DLLA_STATUS_V | 729 | .word SDRC_DLLA_STATUS_V |
639 | sdrc_dlla_ctrl: | 730 | sdrc_dlla_ctrl: |
640 | .word SDRC_DLLA_CTRL_V | 731 | .word SDRC_DLLA_CTRL_V |
641 | pm_prepwstst_core: | ||
642 | .word PM_PREPWSTST_CORE_V | ||
643 | pm_prepwstst_core_p: | 732 | pm_prepwstst_core_p: |
644 | .word PM_PREPWSTST_CORE_P | 733 | .word PM_PREPWSTST_CORE_P |
645 | pm_prepwstst_mpu: | ||
646 | .word PM_PREPWSTST_MPU_V | ||
647 | pm_pwstctrl_mpu: | 734 | pm_pwstctrl_mpu: |
648 | .word PM_PWSTCTRL_MPU_P | 735 | .word PM_PWSTCTRL_MPU_P |
649 | scratchpad_base: | 736 | scratchpad_base: |
@@ -651,13 +738,7 @@ scratchpad_base: | |||
651 | sram_base: | 738 | sram_base: |
652 | .word SRAM_BASE_P + 0x8000 | 739 | .word SRAM_BASE_P + 0x8000 |
653 | sdrc_power: | 740 | sdrc_power: |
654 | .word SDRC_POWER_V | 741 | .word SDRC_POWER_V |
655 | clk_stabilize_delay: | ||
656 | .word 0x000001FF | ||
657 | assoc_mask: | ||
658 | .word 0x3ff | ||
659 | numset_mask: | ||
660 | .word 0x7fff | ||
661 | ttbrbit_mask: | 742 | ttbrbit_mask: |
662 | .word 0xFFFFC000 | 743 | .word 0xFFFFC000 |
663 | table_index_mask: | 744 | table_index_mask: |
@@ -668,5 +749,20 @@ cache_pred_disable_mask: | |||
668 | .word 0xFFFFE7FB | 749 | .word 0xFFFFE7FB |
669 | control_stat: | 750 | control_stat: |
670 | .word CONTROL_STAT | 751 | .word CONTROL_STAT |
752 | control_mem_rta: | ||
753 | .word CONTROL_MEM_RTA_CTRL | ||
754 | kernel_flush: | ||
755 | .word v7_flush_dcache_all | ||
756 | l2dis_3630: | ||
757 | .word 0 | ||
758 | /* | ||
759 | * When exporting to userspace while the counters are in SRAM, | ||
760 | * these 2 words need to be at the end to facilitate retrieval! | ||
761 | */ | ||
762 | kick_counter: | ||
763 | .word 0 | ||
764 | wait_dll_lock_counter: | ||
765 | .word 0 | ||
766 | |||
671 | ENTRY(omap34xx_cpu_suspend_sz) | 767 | ENTRY(omap34xx_cpu_suspend_sz) |
672 | .word . - omap34xx_cpu_suspend | 768 | .word . - omap34xx_cpu_suspend |