| author | Paul Mundt <lethal@linux-sh.org> | 2010-01-21 00:19:41 -0500 |
|---|---|---|
| committer | Paul Mundt <lethal@linux-sh.org> | 2010-01-21 00:19:41 -0500 |
| commit | 77c2019fc1b4495ce483ef2af9cb12bae23e353b (patch) | |
| tree | a5147af1c18fabf6384db75c33dea03648c88bf4 /arch/sh | |
| parent | 35f99c0da17beb5004f06ba86a87d82c1a6467be (diff) | |
sh: initial PMB mapping iteration by helper macro.
All of the cached/uncached mapping setup is duplicated for each size, and
also misses out on the 16MB case. Rather than duplicating the same
iteration code for that as well, consolidate it into a helper macro that
builds an iteration block for each size. The 16MB case is then trivially
bolted on at the end.
Signed-off-by: Paul Mundt <lethal@linux-sh.org>
Diffstat (limited to 'arch/sh')
| -rw-r--r-- | arch/sh/kernel/head_32.S | 207 |
|---|---|---|

1 file changed, 60 insertions, 147 deletions
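As a rough orientation before the patch itself: each __PMB_ITER_BY_SIZE(size) instance below implements a "map as many size-MB chunks as still fit" loop, writing one cached and one uncached PMB entry pair per chunk. The C sketch that follows is a minimal illustration under stated assumptions, not kernel code: the names remaining, phys, virt and uncached_offset are hypothetical stand-ins for what appear to be r5, r0, r2 and r7 in the assembly (r9 and r10 are documented in the patch context as scratch register and entry count), and the PMB flag constants are deliberately left out.

```c
/*
 * Minimal sketch only -- not the kernel code.  Each __PMB_ITER_BY_SIZE(size)
 * instance maps as many size-MB chunks as still fit, emitting one cached and
 * one uncached PMB entry per chunk.  All names and example values here are
 * illustrative; the register reading (r5 = bytes left, r0 = data half,
 * r2 = address half, r7 = cached-to-uncached offset) is an assumption based
 * on the assembly in the patch.
 */
#include <stdio.h>

struct pmb_pair {
	unsigned long data;	/* rough analogue of a PMB_DATA value */
	unsigned long addr;	/* rough analogue of a PMB_ADDR value */
};

#define MAX_PMB_ENTRIES 16	/* the PMB provides 16 entry slots */

static unsigned int iter_by_size(unsigned long size_mb,
				 unsigned long *remaining,
				 unsigned long *phys, unsigned long *virt,
				 unsigned long uncached_offset,
				 struct pmb_pair *tbl, unsigned int n)
{
	unsigned long chunk = size_mb << 20;

	while (*remaining >= chunk && n + 2 <= MAX_PMB_ENTRIES) {
		/* cached mapping of this chunk */
		tbl[n].data = *phys;		/* | PMB_C | PMB_SZ_<size>M */
		tbl[n].addr = *virt;
		n++;

		/* uncached alias, displaced by the cached-to-uncached offset */
		tbl[n].data = *phys;		/* | PMB_UB | PMB_SZ_<size>M */
		tbl[n].addr = *virt + uncached_offset;
		n++;

		*remaining -= chunk;
		*phys += chunk;
		*virt += chunk;
	}
	return n;
}

int main(void)
{
	struct pmb_pair tbl[MAX_PMB_ENTRIES];
	/* example figures only: 448MB of memory, arbitrary base addresses */
	unsigned long remaining = 448UL << 20;
	unsigned long phys = 0x08000000UL, virt = 0x80000000UL;
	static const unsigned long sizes_mb[] = { 512, 128, 64, 16 };
	unsigned int i, n = 0;

	for (i = 0; i < sizeof(sizes_mb) / sizeof(sizes_mb[0]); i++)
		n = iter_by_size(sizes_mb[i], &remaining, &phys, &virt,
				 0x20000000UL, tbl, n);

	/* 448MB = 3 x 128MB + 1 x 64MB -> 8 entries (4 cached + 4 uncached) */
	printf("PMB entries used: %u, unmapped bytes left: %lu\n", n, remaining);
	return 0;
}
```

With the example 448MB figure this uses 8 of the 16 PMB slots (three 128MB pairs plus one 64MB pair), and the newly added 16MB iteration picks up any smaller remainder.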
diff --git a/arch/sh/kernel/head_32.S b/arch/sh/kernel/head_32.S
index 8ee31a0b973e..ee954d1b8cc6 100644
--- a/arch/sh/kernel/head_32.S
+++ b/arch/sh/kernel/head_32.S
@@ -86,6 +86,62 @@ ENTRY(_stext)
 #endif
 
 #if defined(CONFIG_PMB) && !defined(CONFIG_PMB_LEGACY)
89 | |||
90 | #define __PMB_ITER_BY_SIZE(size) \ | ||
91 | .L##size: \ | ||
92 | mov #(size >> 4), r6; \ | ||
93 | shll16 r6; \ | ||
94 | shll8 r6; \ | ||
95 | \ | ||
96 | cmp/hi r5, r6; \ | ||
97 | bt 9999f; \ | ||
98 | \ | ||
99 | mov #(PMB_SZ_##size##M >> 2), r9; \ | ||
100 | shll2 r9; \ | ||
101 | \ | ||
102 | /* \ | ||
103 | * Cached mapping \ | ||
104 | */ \ | ||
105 | mov #PMB_C, r8; \ | ||
106 | or r0, r8; \ | ||
107 | or r9, r8; \ | ||
108 | mov.l r8, @r1; \ | ||
109 | mov.l r2, @r3; \ | ||
110 | \ | ||
111 | /* Increment to the next PMB_DATA entry */ \ | ||
112 | add r4, r1; \ | ||
113 | /* Increment to the next PMB_ADDR entry */ \ | ||
114 | add r4, r3; \ | ||
115 | /* Increment number of PMB entries */ \ | ||
116 | add #1, r10; \ | ||
117 | \ | ||
118 | /* \ | ||
119 | * Uncached mapping \ | ||
120 | */ \ | ||
121 | mov #(PMB_UB >> 8), r8; \ | ||
122 | shll8 r8; \ | ||
123 | \ | ||
124 | or r0, r8; \ | ||
125 | or r9, r8; \ | ||
126 | mov.l r8, @r1; \ | ||
127 | mov r2, r8; \ | ||
128 | add r7, r8; \ | ||
129 | mov.l r8, @r3; \ | ||
130 | \ | ||
131 | /* Increment to the next PMB_DATA entry */ \ | ||
132 | add r4, r1; \ | ||
133 | /* Increment to the next PMB_ADDR entry */ \ | ||
134 | add r4, r3; \ | ||
135 | /* Increment number of PMB entries */ \ | ||
136 | add #1, r10; \ | ||
137 | \ | ||
138 | sub r6, r5; \ | ||
139 | add r6, r0; \ | ||
140 | add r6, r2; \ | ||
141 | \ | ||
142 | bra .L##size; \ | ||
143 | 9999: | ||
144 | |||
 	/*
 	 * Reconfigure the initial PMB mappings setup by the hardware.
 	 *
@@ -142,154 +198,11 @@ ENTRY(_stext)
 	 * r9 = scratch register
 	 * r10 = number of PMB entries we've setup
 	 */
-.L512:
-	mov	#(512 >> 4), r6
-	shll16	r6
-	shll8	r6
-
-	cmp/hi	r5, r6
-	bt	.L128
-
-	mov	#(PMB_SZ_512M >> 2), r9
-	shll2	r9
-
-	/*
-	 * Cached mapping
-	 */
-	mov	#PMB_C, r8
-	or	r0, r8
-	or	r9, r8
-	mov.l	r8, @r1
-	mov.l	r2, @r3
-
-	add	r4, r1	/* Increment to the next PMB_DATA entry */
-	add	r4, r3	/* Increment to the next PMB_ADDR entry */
-
-	add	#1, r10	/* Increment number of PMB entries */
-
-	/*
-	 * Uncached mapping
-	 */
-	mov	#(PMB_UB >> 8), r8
-	shll8	r8
-
-	or	r0, r8
-	or	r9, r8
-	mov.l	r8, @r1
-	mov	r2, r8
-	add	r7, r8
-	mov.l	r8, @r3
-
-	add	r4, r1	/* Increment to the next PMB_DATA entry */
-	add	r4, r3	/* Increment to the next PMB_ADDR entry */
-
-	add	#1, r10	/* Increment number of PMB entries */
-
-	sub	r6, r5
-	add	r6, r0
-	add	r6, r2
-
-	bra	.L512
-
-.L128:
-	mov	#(128 >> 4), r6
-	shll16	r6
-	shll8	r6
-
-	cmp/hi	r5, r6
-	bt	.L64
-
-	mov	#(PMB_SZ_128M >> 2), r9
-	shll2	r9
-
-	/*
-	 * Cached mapping
-	 */
-	mov	#PMB_C, r8
-	or	r0, r8
-	or	r9, r8
-	mov.l	r8, @r1
-	mov.l	r2, @r3
-
-	add	r4, r1	/* Increment to the next PMB_DATA entry */
-	add	r4, r3	/* Increment to the next PMB_ADDR entry */
-
-	add	#1, r10	/* Increment number of PMB entries */
-
-	/*
-	 * Uncached mapping
-	 */
-	mov	#(PMB_UB >> 8), r8
-	shll8	r8
-
-	or	r0, r8
-	or	r9, r8
-	mov.l	r8, @r1
-	mov	r2, r8
-	add	r7, r8
-	mov.l	r8, @r3
-
-	add	r4, r1	/* Increment to the next PMB_DATA entry */
-	add	r4, r3	/* Increment to the next PMB_ADDR entry */
-
-	add	#1, r10	/* Increment number of PMB entries */
-
-	sub	r6, r5
-	add	r6, r0
-	add	r6, r2
-
-	bra	.L128
-
-.L64:
-	mov	#(64 >> 4), r6
-	shll16	r6
-	shll8	r6
-
-	cmp/hi	r5, r6
-	bt	.Ldone
-
-	mov	#(PMB_SZ_64M >> 2), r9
-	shll2	r9
-
-	/*
-	 * Cached mapping
-	 */
-	mov	#PMB_C, r8
-	or	r0, r8
-	or	r9, r8
-	mov.l	r8, @r1
-	mov.l	r2, @r3
-
-	add	r4, r1	/* Increment to the next PMB_DATA entry */
-	add	r4, r3	/* Increment to the next PMB_ADDR entry */
-
-	add	#1, r10	/* Increment number of PMB entries */
-
-	/*
-	 * Uncached mapping
-	 */
-	mov	#(PMB_UB >> 8), r8
-	shll8	r8
-
-	or	r0, r8
-	or	r9, r8
-	mov.l	r8, @r1
-	mov	r2, r8
-	add	r7, r8
-	mov.l	r8, @r3
-
-	add	r4, r1	/* Increment to the next PMB_DATA entry */
-	add	r4, r3	/* Increment to the next PMB_ADDR entry */
-
-	add	#1, r10	/* Increment number of PMB entries */
-
-	sub	r6, r5
-	add	r6, r0
-	add	r6, r2
-
-	bra	.L64
+	__PMB_ITER_BY_SIZE(512)
+	__PMB_ITER_BY_SIZE(128)
+	__PMB_ITER_BY_SIZE(64)
+	__PMB_ITER_BY_SIZE(16)
 
-.Ldone:
 	/* Update cached_to_uncached */
 	mov.l	.Lcached_to_uncached, r0
 	mov.l	r7, @r0
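A note on the macro mechanics, since head_32.S is preprocessed assembly: .S files are run through the C preprocessor, so .L##size and PMB_SZ_##size##M use ordinary ## token pasting to give every invocation its own loop label and size constant, while the trailing 9999: is a GNU as local numeric label, so the bt 9999f exit branch in each expansion resolves to that expansion's own 9999: rather than colliding across the four instances. The token-pasting half can be shown with a trivial stand-alone C program (hypothetical names, illustration only):

```c
/*
 * Stand-alone demo of the ## token pasting that __PMB_ITER_BY_SIZE relies on.
 * SIZE_16M, SIZE_64M and VALUE_BY_SIZE are hypothetical names used only here.
 */
#include <stdio.h>

#define SIZE_16M	16
#define SIZE_64M	64

/* pastes the argument into the surrounding name: VALUE_BY_SIZE(16) -> SIZE_16M */
#define VALUE_BY_SIZE(size)	SIZE_##size##M

int main(void)
{
	printf("%d %d\n", VALUE_BY_SIZE(16), VALUE_BY_SIZE(64));	/* prints: 16 64 */
	return 0;
}
```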