Diffstat (limited to 'arch/avr32/mach-at32ap/pm-at32ap700x.S')
 arch/avr32/mach-at32ap/pm-at32ap700x.S | 108 ++++++++++++++++++++++++++++++++
 1 file changed, 108 insertions(+), 0 deletions(-)
diff --git a/arch/avr32/mach-at32ap/pm-at32ap700x.S b/arch/avr32/mach-at32ap/pm-at32ap700x.S
index 949e2485e278..0a53ad314ff4 100644
--- a/arch/avr32/mach-at32ap/pm-at32ap700x.S
+++ b/arch/avr32/mach-at32ap/pm-at32ap700x.S
@@ -12,6 +12,12 @@
 #include <asm/thread_info.h>
 #include <asm/arch/pm.h>
 
+#include "pm.h"
+#include "sdramc.h"
+
+/* Same as 0xfff00000 but fits in a 21 bit signed immediate */
+#define PM_BASE	-0x100000
+
 	.section .bss, "wa", @nobits
 	.global	disable_idle_sleep
 	.type	disable_idle_sleep, @object
@@ -64,3 +70,105 @@ cpu_idle_skip_sleep:
 	unmask_interrupts
 	retal	r12
 	.size	cpu_idle_skip_sleep, . - cpu_idle_skip_sleep
+
+#ifdef CONFIG_PM
+	.section .init.text, "ax", @progbits
+
+	.global	pm_exception
+	.type	pm_exception, @function
+pm_exception:
+	/*
+	 * Exceptions are masked when we switch to this handler, so
+	 * we'll only get "unrecoverable" exceptions (offset 0.)
+	 */
+	sub	r12, pc, . - .Lpanic_msg
+	lddpc	pc, .Lpanic_addr
+
+	.align	2
+.Lpanic_addr:
+	.long	panic
+.Lpanic_msg:
+	.asciz	"Unrecoverable exception during suspend\n"
+	.size	pm_exception, . - pm_exception
+
+	.global	pm_irq0
+	.type	pm_irq0, @function
+pm_irq0:
+	/* Disable interrupts and return after the sleep instruction */
+	mfsr	r9, SYSREG_RSR_INT0
+	mtsr	SYSREG_RAR_INT0, r8
+	sbr	r9, SYSREG_GM_OFFSET
+	mtsr	SYSREG_RSR_INT0, r9
+	rete
+
+	/*
+	 * void pm_standby(unsigned long sdramc_base)
+	 *
+	 * Enter PM_SUSPEND_STANDBY mode. At this point, all drivers
+	 * are suspended and interrupts are disabled. Interrupts
+	 * marked as 'wakeup' event sources may still come along and
+	 * get us out of here.
+	 *
+	 * The SDRAM will be put into self-refresh mode (which does
+	 * not require a clock from the CPU), and the CPU will be put
+	 * into "frozen" mode (HSB bus stopped). The SDRAM controller
+	 * will automatically bring the SDRAM into normal mode on the
+	 * first access, and the power manager will automatically
+	 * start the HSB and CPU clocks upon a wakeup event.
+	 *
+	 * This code uses the same "skip sleep" technique as above.
+	 * It is very important that execution resumes directly at the
+	 * 1: label after the sleep instruction, since that's where
+	 * we'll end up if the interrupt handler decides that we need
+	 * to skip the sleep instruction.
+	 */
+	.global	pm_standby
+	.type	pm_standby, @function
+pm_standby:
+	/*
+	 * interrupts are already masked at this point, and EVBA
+	 * points to pm_exception above.
+	 */
+	ld.w	r10, r12[SDRAMC_LPR]
+	sub	r8, pc, . - 1f		/* return address for irq handler */
+	mov	r11, SDRAMC_LPR_LPCB_SELF_RFR
+	bfins	r10, r11, 0, 2		/* LPCB <- self refresh */
+	sync	0			/* flush write buffer */
+	st.w	r12[SDRAMC_LPR], r10	/* put SDRAM in self-refresh mode */
+	ld.w	r11, r12[SDRAMC_LPR]
+	unmask_interrupts
+	sleep	CPU_SLEEP_FROZEN
+1:	mask_interrupts
+	retal	r12
+	.size	pm_standby, . - pm_standby
+
+	.global	pm_suspend_to_ram
+	.type	pm_suspend_to_ram, @function
+pm_suspend_to_ram:
+	/*
+	 * interrupts are already masked at this point, and EVBA
+	 * points to pm_exception above.
+	 */
+	mov	r11, 0
+	cache	r11[2], 8		/* clean all dcache lines */
+	sync	0			/* flush write buffer */
+	ld.w	r10, r12[SDRAMC_LPR]
+	sub	r8, pc, . - 1f		/* return address for irq handler */
+	mov	r11, SDRAMC_LPR_LPCB_SELF_RFR
+	bfins	r10, r11, 0, 2		/* LPCB <- self refresh */
+	st.w	r12[SDRAMC_LPR], r10	/* put SDRAM in self-refresh mode */
+	ld.w	r11, r12[SDRAMC_LPR]
+
+	unmask_interrupts
+	sleep	CPU_SLEEP_STOP
+1:	mask_interrupts
+
+	retal	r12
+	.size	pm_suspend_to_ram, . - pm_suspend_to_ram
+
+	.global	pm_sram_end
+	.type	pm_sram_end, @function
+pm_sram_end:
+	.size	pm_sram_end, 0
+
+#endif /* CONFIG_PM */
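
The PM_BASE definition near the top of the patch relies on two's-complement arithmetic: -0x100000 sign-extends to 0xfff00000 in a 32-bit register, and it is exactly the most negative value a 21-bit signed immediate can hold (-2^20). The following standalone C check of that arithmetic is a sketch for illustration only, not part of the patch:

	/*
	 * Host-side check of the PM_BASE comment; not kernel code.
	 * Confirms -0x100000 is bit-identical to 0xfff00000 as a
	 * 32-bit value and lies in the 21-bit signed range
	 * (-2^20 .. 2^20 - 1).
	 */
	#include <stdint.h>
	#include <stdio.h>

	int main(void)
	{
		int32_t pm_base = -0x100000;

		/* prints 0xfff00000 */
		printf("as u32: 0x%08x\n", (unsigned int)(uint32_t)pm_base);
		printf("fits 21-bit signed immediate: %s\n",
		       (pm_base >= -(1 << 20) && pm_base <= (1 << 20) - 1) ?
		       "yes" : "no");
		return 0;
	}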
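
For context, pm_standby and pm_suspend_to_ram take the SDRAM controller base address in r12 (the first argument register) and are meant to be invoked from the platform suspend code once drivers are suspended and interrupts are masked. The C sketch below shows one plausible shape of such a caller under those assumptions; avr32_pm_enter() and sdramc_base are illustrative names, not the actual arch/avr32/mach-at32ap/pm.c implementation:

	/*
	 * Hypothetical caller of the entry points added in this patch.
	 * Only pm_standby() and pm_suspend_to_ram() come from the patch;
	 * everything else here is an assumed sketch.
	 */
	#include <linux/errno.h>
	#include <linux/suspend.h>

	extern void pm_standby(unsigned long sdramc_base);
	extern void pm_suspend_to_ram(unsigned long sdramc_base);

	static unsigned long sdramc_base;	/* mapped SDRAM controller base */

	static int avr32_pm_enter(suspend_state_t state)
	{
		switch (state) {
		case PM_SUSPEND_STANDBY:
			/* SDRAM -> self-refresh, CPU -> "frozen" (HSB stopped) */
			pm_standby(sdramc_base);
			break;
		case PM_SUSPEND_MEM:
			/* dcache cleaned, SDRAM -> self-refresh, clocks stopped */
			pm_suspend_to_ram(sdramc_base);
			break;
		default:
			return -EINVAL;
		}

		return 0;
	}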