Diffstat (limited to 'arch/arm/mm')
-rw-r--r--	arch/arm/mm/Kconfig      | 18
-rw-r--r--	arch/arm/mm/cache-l2x0.c |  6
-rw-r--r--	arch/arm/mm/mmu.c        | 14
-rw-r--r--	arch/arm/mm/proc-syms.c  |  2
-rw-r--r--	arch/arm/mm/proc-v7.S    | 33
5 files changed, 38 insertions, 35 deletions
diff --git a/arch/arm/mm/Kconfig b/arch/arm/mm/Kconfig
index e7904bc92c73..12161ae445da 100644
--- a/arch/arm/mm/Kconfig
+++ b/arch/arm/mm/Kconfig
@@ -345,13 +345,14 @@ config CPU_XSC3
 # ARMv6
 config CPU_V6
 	bool "Support ARM V6 processor"
-	depends on ARCH_INTEGRATOR || MACH_REALVIEW_EB || ARCH_OMAP2
+	depends on ARCH_INTEGRATOR || MACH_REALVIEW_EB || ARCH_OMAP2 || ARCH_MX3
+	default y if ARCH_MX3
 	select CPU_32v6
 	select CPU_ABRT_EV6
 	select CPU_CACHE_V6
 	select CPU_CACHE_VIPT
 	select CPU_CP15_MMU
-	select CPU_HAS_ASID
+	select CPU_HAS_ASID if MMU
 	select CPU_COPY_V6 if MMU
 	select CPU_TLB_V6 if MMU
 
@@ -359,7 +360,7 @@ config CPU_V6
 config CPU_32v6K
 	bool "Support ARM V6K processor extensions" if !SMP
 	depends on CPU_V6
-	default y if SMP
+	default y if SMP && !ARCH_MX3
 	help
 	  Say Y here if your ARMv6 processor supports the 'K' extension.
 	  This enables the kernel to use some instructions not present
@@ -377,7 +378,7 @@ config CPU_V7
 	select CPU_CACHE_V7
 	select CPU_CACHE_VIPT
 	select CPU_CP15_MMU
-	select CPU_HAS_ASID
+	select CPU_HAS_ASID if MMU
 	select CPU_COPY_V6 if MMU
 	select CPU_TLB_V7 if MMU
 
@@ -405,6 +406,7 @@ config CPU_32v5
 
 config CPU_32v6
 	bool
+	select TLS_REG_EMUL if !CPU_32v6K && !MMU
 
 config CPU_32v7
 	bool
@@ -598,7 +600,7 @@ config CPU_DCACHE_SIZE
 
 config CPU_DCACHE_WRITETHROUGH
 	bool "Force write through D-cache"
-	depends on (CPU_ARM740T || CPU_ARM920T || CPU_ARM922T || CPU_ARM925T || CPU_ARM926T || CPU_ARM940T || CPU_ARM946E || CPU_ARM1020 || CPU_V6) && !CPU_DCACHE_DISABLE
+	depends on (CPU_ARM740T || CPU_ARM920T || CPU_ARM922T || CPU_ARM925T || CPU_ARM926T || CPU_ARM940T || CPU_ARM946E || CPU_ARM1020) && !CPU_DCACHE_DISABLE
 	default y if CPU_ARM925T
 	help
 	  Say Y here to use the data cache in writethrough mode. Unless you
@@ -611,12 +613,6 @@ config CPU_CACHE_ROUND_ROBIN
 	  Say Y here to use the predictable round-robin cache replacement
 	  policy. Unless you specifically require this or are unsure, say N.
 
-config CPU_L2CACHE_DISABLE
-	bool "Disable level 2 cache"
-	depends on CPU_V7
-	help
-	  Say Y here to disable the level 2 cache. If unsure, say N.
-
 config CPU_BPREDICT_DISABLE
 	bool "Disable branch prediction"
 	depends on CPU_ARM1020 || CPU_V6 || CPU_XSC3 || CPU_V7
diff --git a/arch/arm/mm/cache-l2x0.c b/arch/arm/mm/cache-l2x0.c
index 08a36f1b35d2..b4e9b734e0bd 100644
--- a/arch/arm/mm/cache-l2x0.c
+++ b/arch/arm/mm/cache-l2x0.c
@@ -17,6 +17,7 @@
  * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
  */
 #include <linux/init.h>
+#include <linux/spinlock.h>
 
 #include <asm/cacheflush.h>
 #include <asm/io.h>
@@ -25,14 +26,19 @@
 #define CACHE_LINE_SIZE		32
 
 static void __iomem *l2x0_base;
+static DEFINE_SPINLOCK(l2x0_lock);
 
 static inline void sync_writel(unsigned long val, unsigned long reg,
 			       unsigned long complete_mask)
 {
+	unsigned long flags;
+
+	spin_lock_irqsave(&l2x0_lock, flags);
 	writel(val, l2x0_base + reg);
 	/* wait for the operation to complete */
 	while (readl(l2x0_base + reg) & complete_mask)
 		;
+	spin_unlock_irqrestore(&l2x0_lock, flags);
 }
 
 static inline void cache_sync(void)
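Note: the effect of the cache-l2x0.c hunks is that every background L2X0 maintenance operation is now started and polled to completion atomically. Reconstructed from the hunks above, the helper reads as follows after the patch; the irq-saving lock keeps an interrupt handler on the same CPU, or a concurrent caller on another CPU, from issuing a competing operation mid-sequence:

static inline void sync_writel(unsigned long val, unsigned long reg,
			       unsigned long complete_mask)
{
	unsigned long flags;

	/* serialize the start/poll sequence against all other callers */
	spin_lock_irqsave(&l2x0_lock, flags);
	writel(val, l2x0_base + reg);
	/* wait for the operation to complete */
	while (readl(l2x0_base + reg) & complete_mask)
		;
	spin_unlock_irqrestore(&l2x0_lock, flags);
}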
diff --git a/arch/arm/mm/mmu.c b/arch/arm/mm/mmu.c
index 3b5e47dc0c97..e5d61ee3d4a1 100644
--- a/arch/arm/mm/mmu.c
+++ b/arch/arm/mm/mmu.c
@@ -114,6 +114,10 @@ static void __init early_cachepolicy(char **p)
 	}
 	if (i == ARRAY_SIZE(cache_policies))
 		printk(KERN_ERR "ERROR: unknown or unsupported cache policy\n");
+	if (cpu_architecture() >= CPU_ARCH_ARMv6) {
+		printk(KERN_WARNING "Only cachepolicy=writeback supported on ARMv6 and later\n");
+		cachepolicy = CPOLICY_WRITEBACK;
+	}
 	flush_cache_all();
 	set_cr(cr_alignment);
 }
@@ -252,13 +256,15 @@ static void __init build_mem_type_table(void)
 	int cpu_arch = cpu_architecture();
 	int i;
 
+	if (cpu_arch < CPU_ARCH_ARMv6) {
 #if defined(CONFIG_CPU_DCACHE_DISABLE)
 	if (cachepolicy > CPOLICY_BUFFERED)
 		cachepolicy = CPOLICY_BUFFERED;
 #elif defined(CONFIG_CPU_DCACHE_WRITETHROUGH)
 	if (cachepolicy > CPOLICY_WRITETHROUGH)
 		cachepolicy = CPOLICY_WRITETHROUGH;
 #endif
+	}
 	if (cpu_arch < CPU_ARCH_ARMv5) {
 		if (cachepolicy >= CPOLICY_WRITEALLOC)
 			cachepolicy = CPOLICY_WRITEBACK;
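Note: taken together, the two mmu.c hunks mean ARMv6 and later always run with the write-back policy: early_cachepolicy() rejects anything else from the command line, and the Kconfig-driven downgrades in build_mem_type_table() now apply only below ARMv6. A condensed sketch of the resulting clamping logic; clamp_cachepolicy() is a hypothetical name (the real code is inline in build_mem_type_table()), and the CPOLICY_* values are assumed to be ordered uncached < buffered < writethrough < writeback < writealloc:

static void clamp_cachepolicy(int cpu_arch)
{
	if (cpu_arch < CPU_ARCH_ARMv6) {
#if defined(CONFIG_CPU_DCACHE_DISABLE)
		/* no D-cache: allow nothing stronger than bufferable */
		if (cachepolicy > CPOLICY_BUFFERED)
			cachepolicy = CPOLICY_BUFFERED;
#elif defined(CONFIG_CPU_DCACHE_WRITETHROUGH)
		/* D-cache forced to write-through */
		if (cachepolicy > CPOLICY_WRITETHROUGH)
			cachepolicy = CPOLICY_WRITETHROUGH;
#endif
	}
	/* pre-ARMv5 cores cannot do write-allocate */
	if (cpu_arch < CPU_ARCH_ARMv5 && cachepolicy >= CPOLICY_WRITEALLOC)
		cachepolicy = CPOLICY_WRITEBACK;
}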
diff --git a/arch/arm/mm/proc-syms.c b/arch/arm/mm/proc-syms.c
index 9f396b4fa0b7..2b5ba396e3a6 100644
--- a/arch/arm/mm/proc-syms.c
+++ b/arch/arm/mm/proc-syms.c
@@ -31,12 +31,14 @@ EXPORT_SYMBOL(__cpuc_coherent_kern_range);
 EXPORT_SYMBOL(cpu_cache);
 #endif
 
+#ifdef CONFIG_MMU
 #ifndef MULTI_USER
 EXPORT_SYMBOL(__cpu_clear_user_page);
 EXPORT_SYMBOL(__cpu_copy_user_page);
 #else
 EXPORT_SYMBOL(cpu_user);
 #endif
+#endif
 
 /*
  * No module should need to touch the TLB (and currently
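Note: the proc-syms.c hunk guards the user-page helper exports, presumably because the copy/clear user-page implementations are only built for MMU kernels, so a nommu build must not reference them. The block reads like this after the patch:

#ifdef CONFIG_MMU	/* user-page helpers exist only with an MMU */
#ifndef MULTI_USER
EXPORT_SYMBOL(__cpu_clear_user_page);
EXPORT_SYMBOL(__cpu_copy_user_page);
#else
EXPORT_SYMBOL(cpu_user);
#endif
#endif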
diff --git a/arch/arm/mm/proc-v7.S b/arch/arm/mm/proc-v7.S
index 718f4782ee8b..e0acc5ae6f6f 100644
--- a/arch/arm/mm/proc-v7.S
+++ b/arch/arm/mm/proc-v7.S
@@ -77,6 +77,7 @@ ENTRY(cpu_v7_dcache_clean_area)
  *	- we are not using split page tables
  */
 ENTRY(cpu_v7_switch_mm)
+#ifdef CONFIG_MMU
 	mov	r2, #0
 	ldr	r1, [r1, #MM_CONTEXT_ID]	@ get mm->context.id
 	orr	r0, r0, #TTB_RGN_OC_WB		@ mark PTWs outer cacheable, WB
@@ -86,6 +87,7 @@ ENTRY(cpu_v7_switch_mm)
 	isb
 	mcr	p15, 0, r1, c13, c0, 1		@ set context ID
 	isb
+#endif
 	mov	pc, lr
 
 /*
@@ -109,6 +111,7 @@ ENTRY(cpu_v7_switch_mm)
  *	  1111   0    1	  1	r/w	r/w
  */
 ENTRY(cpu_v7_set_pte_ext)
+#ifdef CONFIG_MMU
 	str	r1, [r0], #-2048		@ linux version
 
 	bic	r3, r1, #0x000003f0
@@ -136,6 +139,7 @@ ENTRY(cpu_v7_set_pte_ext)
 
 	str	r3, [r0]
 	mcr	p15, 0, r0, c7, c10, 1		@ flush_pte
+#endif
 	mov	pc, lr
 
 cpu_v7_name:
@@ -169,6 +173,7 @@ __v7_setup:
 	mcr	p15, 0, r10, c7, c5, 0		@ I+BTB cache invalidate
 #endif
 	dsb
+#ifdef CONFIG_MMU
 	mcr	p15, 0, r10, c8, c7, 0		@ invalidate I + D TLBs
 	mcr	p15, 0, r10, c2, c0, 2		@ TTB control register
 	orr	r4, r4, #TTB_RGN_OC_WB		@ mark PTWs outer cacheable, WB
@@ -176,21 +181,12 @@ __v7_setup:
 	mcr	p15, 0, r4, c2, c0, 1		@ load TTB1
 	mov	r10, #0x1f			@ domains 0, 1 = manager
 	mcr	p15, 0, r10, c3, c0, 0		@ load domain access register
-#ifndef CONFIG_CPU_L2CACHE_DISABLE
-	@ L2 cache configuration in the L2 aux control register
-	mrc	p15, 1, r10, c9, c0, 2
-	bic	r10, r10, #(1 << 16)		@ L2 outer cache
-	mcr	p15, 1, r10, c9, c0, 2
-	@ L2 cache is enabled in the aux control register
-	mrc	p15, 0, r10, c1, c0, 1
-	orr	r10, r10, #2
-	mcr	p15, 0, r10, c1, c0, 1
 #endif
-	mrc	p15, 0, r0, c1, c0, 0		@ read control register
-	ldr	r10, cr1_clear			@ get mask for bits to clear
-	bic	r0, r0, r10			@ clear bits them
-	ldr	r10, cr1_set			@ get mask for bits to set
-	orr	r0, r0, r10			@ set them
+	adr	r5, v7_crval
+	ldmia	r5, {r5, r6}
+	mrc	p15, 0, r0, c1, c0, 0		@ read control register
+	bic	r0, r0, r5			@ clear bits them
+	orr	r0, r0, r6			@ set them
 	mov	pc, lr				@ return to head.S:__ret
 
 /*
@@ -199,12 +195,9 @@ __v7_setup:
  * rrrr rrrx xxx0 0101 xxxx xxxx x111 xxxx < forced
  *         0 110       0011 1.00 .111 1101 < we want
  */
-	.type	cr1_clear, #object
-	.type	cr1_set, #object
-cr1_clear:
-	.word	0x0120c302
-cr1_set:
-	.word	0x00c0387d
+	.type	v7_crval, #object
+v7_crval:
+	crval	clear=0x0120c302, mmuset=0x00c0387d, ucset=0x00c0187c
 
 __v7_setup_stack:
 	.space	4 * 11				@ 11 registers
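Note: crval is a macro defined outside this diff (in arch/arm/mm/proc-macros.S) that emits the clear mask followed by one set mask, apparently choosing mmuset or ucset according to CONFIG_MMU; __v7_setup then fetches the pair with ldmia and applies it. A hypothetical C rendering of the post-patch control register update, using the constants from the crval line above (mmuset and ucset differ by 0x2001, i.e. the MMU enable bit 0 and bit 13):

static unsigned long v7_setup_cr(unsigned long cr)
{
	const unsigned long clear = 0x0120c302;
#ifdef CONFIG_MMU
	const unsigned long set = 0x00c0387d;	/* mmuset */
#else
	const unsigned long set = 0x00c0187c;	/* ucset: bits 0 and 13 left clear */
#endif
	cr &= ~clear;	/* bic	r0, r0, r5 */
	cr |= set;	/* orr	r0, r0, r6 */
	return cr;
}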