Diffstat (limited to 'arch/arm/mm')
-rw-r--r--	arch/arm/mm/Kconfig	13
-rw-r--r--	arch/arm/mm/Makefile	1
-rw-r--r--	arch/arm/mm/alignment.c	2
-rw-r--r--	arch/arm/mm/ioremap.c	2
-rw-r--r--	arch/arm/mm/mmap.c	2
-rw-r--r--	arch/arm/mm/mmu.c	2
-rw-r--r--	arch/arm/mm/proc-v7.S	2
-rw-r--r--	arch/arm/mm/tlb-v7.S	88
8 files changed, 106 insertions, 6 deletions
diff --git a/arch/arm/mm/Kconfig b/arch/arm/mm/Kconfig
index 15f0284010c..e7904bc92c7 100644
--- a/arch/arm/mm/Kconfig
+++ b/arch/arm/mm/Kconfig
@@ -351,6 +351,7 @@ config CPU_V6
 	select CPU_CACHE_V6
 	select CPU_CACHE_VIPT
 	select CPU_CP15_MMU
+	select CPU_HAS_ASID
 	select CPU_COPY_V6 if MMU
 	select CPU_TLB_V6 if MMU
 
@@ -376,8 +377,9 @@ config CPU_V7
 	select CPU_CACHE_V7
 	select CPU_CACHE_VIPT
 	select CPU_CP15_MMU
+	select CPU_HAS_ASID
 	select CPU_COPY_V6 if MMU
-	select CPU_TLB_V6 if MMU
+	select CPU_TLB_V7 if MMU
 
 # Figure out what processor architecture version we should be using.
 # This defines the compiler instruction set which depends on the machine type.
@@ -496,8 +498,17 @@ config CPU_TLB_V4WBI
 config CPU_TLB_V6
 	bool
 
+config CPU_TLB_V7
+	bool
+
 endif
 
+config CPU_HAS_ASID
+	bool
+	help
+	  This indicates whether the CPU has the ASID register; used to
+	  tag TLB and possibly cache entries.
+
 config CPU_CP15
 	bool
 	help
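The new CPU_HAS_ASID option only declares that the core has an ASID register; the tagging itself lives in the mm context that the TLB code reads. As a rough illustration of the kind of definition such an option gates elsewhere in the tree (field and macro names here follow the asm-arm/mmu.h convention of this era, but treat them as an approximation rather than part of this diff):

/* Illustrative only: an ASID-tagged mm context of the sort CONFIG_CPU_HAS_ASID
 * is meant to select.  Names are assumptions, not part of this patch.
 */
#ifdef CONFIG_CPU_HAS_ASID
typedef struct {
	unsigned int id;			/* hardware ASID in the low 8 bits */
} mm_context_t;

#define ASID(mm)	((mm)->context.id & 255)	/* what the mmid/asid macros extract */
#else
typedef unsigned long mm_context_t;
#define ASID(mm)	(0)
#endif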
diff --git a/arch/arm/mm/Makefile b/arch/arm/mm/Makefile
index b5bd335ff14..762702765fc 100644
--- a/arch/arm/mm/Makefile
+++ b/arch/arm/mm/Makefile
@@ -46,6 +46,7 @@ obj-$(CONFIG_CPU_TLB_V4WT) += tlb-v4.o
 obj-$(CONFIG_CPU_TLB_V4WB)	+= tlb-v4wb.o
 obj-$(CONFIG_CPU_TLB_V4WBI)	+= tlb-v4wbi.o
 obj-$(CONFIG_CPU_TLB_V6)	+= tlb-v6.o
+obj-$(CONFIG_CPU_TLB_V7)	+= tlb-v7.o
 
 obj-$(CONFIG_CPU_ARM610)	+= proc-arm6_7.o
 obj-$(CONFIG_CPU_ARM710)	+= proc-arm6_7.o
diff --git a/arch/arm/mm/alignment.c b/arch/arm/mm/alignment.c
index 19ca333240e..36440c89958 100644
--- a/arch/arm/mm/alignment.c
+++ b/arch/arm/mm/alignment.c
@@ -3,7 +3,7 @@
  *
  * Copyright (C) 1995  Linus Torvalds
  * Modifications for ARM processor (c) 1995-2001 Russell King
- * Thumb aligment fault fixups (c) 2004 MontaVista Software, Inc.
+ * Thumb alignment fault fixups (c) 2004 MontaVista Software, Inc.
  * - Adapted from gdb/sim/arm/thumbemu.c -- Thumb instruction emulation.
  *   Copyright (C) 1996, Cygnus Software Technologies Ltd.
  *
diff --git a/arch/arm/mm/ioremap.c b/arch/arm/mm/ioremap.c
index d6167ad4e01..f3ade18862a 100644
--- a/arch/arm/mm/ioremap.c
+++ b/arch/arm/mm/ioremap.c
@@ -346,7 +346,7 @@ void __iounmap(volatile void __iomem *addr)
 #ifndef CONFIG_SMP
 	/*
 	 * If this is a section based mapping we need to handle it
-	 * specially as the VM subysystem does not know how to handle
+	 * specially as the VM subsystem does not know how to handle
 	 * such a beast. We need the lock here b/c we need to clear
 	 * all the mappings before the area can be reclaimed
 	 * by someone else.
diff --git a/arch/arm/mm/mmap.c b/arch/arm/mm/mmap.c
index 2c4c2422cd1..2728b0e7d2b 100644
--- a/arch/arm/mm/mmap.c
+++ b/arch/arm/mm/mmap.c
@@ -5,7 +5,7 @@
 #include <linux/mm.h>
 #include <linux/mman.h>
 #include <linux/shm.h>
-
+#include <linux/sched.h>
 #include <asm/system.h>
 
 #define COLOUR_ALIGN(addr,pgoff)		\
diff --git a/arch/arm/mm/mmu.c b/arch/arm/mm/mmu.c
index 2ba1530d1ce..02e050ae59f 100644
--- a/arch/arm/mm/mmu.c
+++ b/arch/arm/mm/mmu.c
@@ -92,7 +92,7 @@ static struct cachepolicy cache_policies[] __initdata = {
 };
 
 /*
- * These are useful for identifing cache coherency
+ * These are useful for identifying cache coherency
  * problems by allowing the cache or the cache and
  * writebuffer to be turned off.  (Note: the write
  * buffer should not be on and the cache off).
diff --git a/arch/arm/mm/proc-v7.S b/arch/arm/mm/proc-v7.S
index dd823dd4a37..718f4782ee8 100644
--- a/arch/arm/mm/proc-v7.S
+++ b/arch/arm/mm/proc-v7.S
@@ -256,7 +256,7 @@ __v7_proc_info:
 	.long	HWCAP_SWP|HWCAP_HALF|HWCAP_THUMB|HWCAP_FAST_MULT|HWCAP_EDSP
 	.long	cpu_v7_name
 	.long	v7_processor_functions
-	.long	v6wbi_tlb_fns
+	.long	v7wbi_tlb_fns
 	.long	v6_user_fns
 	.long	v7_cache_fns
 	.size	__v7_proc_info, . - __v7_proc_info
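The only functional change in this hunk is the fourth word: the v7 processor description now points at the new v7 TLB descriptor instead of the v6 one. Roughly, the words emitted in __v7_proc_info fill the tail of a proc_info_list record along these lines (a sketch with approximate field names, not a quote of include/asm-arm/procinfo.h):

/* Sketch of the record tail that __v7_proc_info populates; names are
 * approximate and shown only to indicate which slot v7wbi_tlb_fns takes.
 */
struct proc_info_tail {
	unsigned long		elf_hwcap;	/* HWCAP_SWP|HWCAP_HALF|...             */
	const char		*cpu_name;	/* cpu_v7_name                          */
	struct processor	*proc;		/* v7_processor_functions               */
	struct cpu_tlb_fns	*tlb;		/* was v6wbi_tlb_fns, now v7wbi_tlb_fns */
	struct cpu_user_fns	*user;		/* v6_user_fns                          */
	struct cpu_cache_fns	*cache;		/* v7_cache_fns                         */
};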
diff --git a/arch/arm/mm/tlb-v7.S b/arch/arm/mm/tlb-v7.S
new file mode 100644
index 00000000000..b56dda8052f
--- /dev/null
+++ b/arch/arm/mm/tlb-v7.S
@@ -0,0 +1,88 @@
+/*
+ *  linux/arch/arm/mm/tlb-v7.S
+ *
+ *  Copyright (C) 1997-2002 Russell King
+ *  Modified for ARMv7 by Catalin Marinas
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ *
+ *  ARM architecture version 7 TLB handling functions.
+ *  These assume a split I/D TLB.
+ */
+#include <linux/linkage.h>
+#include <asm/asm-offsets.h>
+#include <asm/page.h>
+#include <asm/tlbflush.h>
+#include "proc-macros.S"
+
+/*
+ *	v7wbi_flush_user_tlb_range(start, end, vma)
+ *
+ *	Invalidate a range of TLB entries in the specified address space.
+ *
+ *	- start - start address (may not be aligned)
+ *	- end   - end address (exclusive, may not be aligned)
+ *	- vma   - vma_struct describing address range
+ *
+ *	It is assumed that:
+ *	- the "Invalidate single entry" instruction will invalidate
+ *	  both the I and the D TLBs on Harvard-style TLBs
+ */
+ENTRY(v7wbi_flush_user_tlb_range)
+	vma_vm_mm r3, r2			@ get vma->vm_mm
+	mmid	r3, r3				@ get vm_mm->context.id
+	dsb
+	mov	r0, r0, lsr #PAGE_SHIFT		@ align address
+	mov	r1, r1, lsr #PAGE_SHIFT
+	asid	r3, r3				@ mask ASID
+	orr	r0, r3, r0, lsl #PAGE_SHIFT	@ Create initial MVA
+	mov	r1, r1, lsl #PAGE_SHIFT
+	vma_vm_flags r2, r2			@ get vma->vm_flags
+1:
+	mcr	p15, 0, r0, c8, c6, 1		@ TLB invalidate D MVA (was 1)
+	tst	r2, #VM_EXEC			@ Executable area ?
+	mcrne	p15, 0, r0, c8, c5, 1		@ TLB invalidate I MVA (was 1)
+	add	r0, r0, #PAGE_SZ
+	cmp	r0, r1
+	blo	1b
+	mov	ip, #0
+	mcr	p15, 0, ip, c7, c5, 6		@ flush BTAC/BTB
+	dsb
+	mov	pc, lr
+
+/*
+ *	v7wbi_flush_kern_tlb_range(start,end)
+ *
+ *	Invalidate a range of kernel TLB entries
+ *
+ *	- start - start address (may not be aligned)
+ *	- end   - end address (exclusive, may not be aligned)
+ */
+ENTRY(v7wbi_flush_kern_tlb_range)
+	dsb
+	mov	r0, r0, lsr #PAGE_SHIFT		@ align address
+	mov	r1, r1, lsr #PAGE_SHIFT
+	mov	r0, r0, lsl #PAGE_SHIFT
+	mov	r1, r1, lsl #PAGE_SHIFT
+1:
+	mcr	p15, 0, r0, c8, c6, 1		@ TLB invalidate D MVA
+	mcr	p15, 0, r0, c8, c5, 1		@ TLB invalidate I MVA
+	add	r0, r0, #PAGE_SZ
+	cmp	r0, r1
+	blo	1b
+	mov	r2, #0
+	mcr	p15, 0, r2, c7, c5, 6		@ flush BTAC/BTB
+	dsb
+	isb
+	mov	pc, lr
+
+	.section ".text.init", #alloc, #execinstr
+
+	.type	v7wbi_tlb_fns, #object
+ENTRY(v7wbi_tlb_fns)
+	.long	v7wbi_flush_user_tlb_range
+	.long	v7wbi_flush_kern_tlb_range
+	.long	v6wbi_tlb_flags
+	.size	v7wbi_tlb_fns, . - v7wbi_tlb_fns
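The three .long entries in v7wbi_tlb_fns are laid out to match the cpu_tlb_fns descriptor the ARM TLB-flush code dispatches through: two function pointers plus the TLB feature flags. The sketch below shows the rough shape and what a generic range flush ultimately calls on a v7-only build; take the field names and the multi-TLB remark as assumptions from memory of include/asm-arm/tlbflush.h rather than a quote of it.

/* Rough shape of the descriptor the table above fills in. */
struct vm_area_struct;

struct cpu_tlb_fns {
	void (*flush_user_range)(unsigned long start, unsigned long end,
				 struct vm_area_struct *vma);
	void (*flush_kern_range)(unsigned long start, unsigned long end);
	unsigned long tlb_flags;
};

/* Entry points provided by tlb-v7.S, with the argument order its comments describe. */
extern void v7wbi_flush_user_tlb_range(unsigned long start, unsigned long end,
				       struct vm_area_struct *vma);
extern void v7wbi_flush_kern_tlb_range(unsigned long start, unsigned long end);

/* Hypothetical single-TLB build: a flush_tlb_range()-style helper reduces to a
 * direct call into the v7 routine.  With several TLB types configured, the call
 * would instead go through a cpu_tlb_fns instance filled from v7wbi_tlb_fns,
 * which __v7_proc_info points at, during boot.
 */
static inline void example_flush_range(struct vm_area_struct *vma,
				       unsigned long start, unsigned long end)
{
	v7wbi_flush_user_tlb_range(start, end, vma);
}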