author    Catalin Marinas <catalin.marinas@arm.com>  2009-07-24 07:32:56 -0400
committer Catalin Marinas <catalin.marinas@arm.com>  2009-07-24 07:32:56 -0400
commit    347c8b70b1d5256e445e54e736f88d21877616cf (patch)
tree      513ae480cdb8eb46ec68aab77ab78c5d8b3a205b /arch
parent    b86040a59feb255a8193173caa4d5199464433d5 (diff)
Thumb-2: Implement the unified arch/arm/mm support
This patch adds the ARM/Thumb-2 unified support to the arch/arm/mm/* files.

Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
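The change builds on the ARM()/THUMB() selection macros, which let a single source file carry both instruction sequences and emit only the one matching the configured instruction set. Below is a simplified sketch of how such macros can be defined; the kernel's real definitions (with additional assembler plumbing) live in arch/arm/include/asm/unified.h.

/*
 * Simplified sketch of the ARM()/THUMB() selection macros, not the exact
 * kernel definitions (see arch/arm/include/asm/unified.h in-tree).
 * With CONFIG_THUMB2_KERNEL set, only the THUMB() arguments survive
 * preprocessing; otherwise only the ARM() arguments do.
 */
#ifdef CONFIG_THUMB2_KERNEL
#define ARM(x...)
#define THUMB(x...)    x
#else
#define ARM(x...)      x
#define THUMB(x...)
#endif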
Diffstat (limited to 'arch')
-rw-r--r--  arch/arm/mm/alignment.c  20
-rw-r--r--  arch/arm/mm/cache-v7.S   16
-rw-r--r--  arch/arm/mm/proc-v7.S     5
3 files changed, 30 insertions(+), 11 deletions(-)
diff --git a/arch/arm/mm/alignment.c b/arch/arm/mm/alignment.c
index 03cd27d917b9..b270d6228fe2 100644
--- a/arch/arm/mm/alignment.c
+++ b/arch/arm/mm/alignment.c
@@ -159,7 +159,9 @@ union offset_union {
 
 #define __get8_unaligned_check(ins,val,addr,err)	\
 	__asm__(					\
-	"1:	"ins"	%1, [%2], #1\n"			\
+ ARM(	"1:	"ins"	%1, [%2], #1\n"	)		\
+ THUMB(	"1:	"ins"	%1, [%2]\n"	)		\
+ THUMB(	"	add	%2, %2, #1\n"	)		\
 	"2:\n"						\
 	"	.section .fixup,\"ax\"\n"		\
 	"	.align	2\n"				\
@@ -215,7 +217,9 @@ union offset_union {
 	do {							\
 		unsigned int err = 0, v = val, a = addr;	\
 		__asm__( FIRST_BYTE_16				\
-	"1:	"ins"	%1, [%2], #1\n"			\
+ ARM(	"1:	"ins"	%1, [%2], #1\n"	)		\
+ THUMB(	"1:	"ins"	%1, [%2]\n"	)		\
+ THUMB(	"	add	%2, %2, #1\n"	)		\
 	"	mov	%1, %1, "NEXT_BYTE"\n"		\
 	"2:	"ins"	%1, [%2]\n"			\
 	"3:\n"						\
@@ -245,11 +249,17 @@ union offset_union {
 	do {							\
 		unsigned int err = 0, v = val, a = addr;	\
 		__asm__( FIRST_BYTE_32				\
-	"1:	"ins"	%1, [%2], #1\n"			\
+ ARM(	"1:	"ins"	%1, [%2], #1\n"	)		\
+ THUMB(	"1:	"ins"	%1, [%2]\n"	)		\
+ THUMB(	"	add	%2, %2, #1\n"	)		\
 	"	mov	%1, %1, "NEXT_BYTE"\n"		\
-	"2:	"ins"	%1, [%2], #1\n"			\
+ ARM(	"2:	"ins"	%1, [%2], #1\n"	)		\
+ THUMB(	"2:	"ins"	%1, [%2]\n"	)		\
+ THUMB(	"	add	%2, %2, #1\n"	)		\
 	"	mov	%1, %1, "NEXT_BYTE"\n"		\
-	"3:	"ins"	%1, [%2], #1\n"			\
+ ARM(	"3:	"ins"	%1, [%2], #1\n"	)		\
+ THUMB(	"3:	"ins"	%1, [%2]\n"	)		\
+ THUMB(	"	add	%2, %2, #1\n"	)		\
 	"	mov	%1, %1, "NEXT_BYTE"\n"		\
 	"4:	"ins"	%1, [%2]\n"			\
 	"5:\n"						\
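The Thumb-2 variants above split the post-increment into a separate add because these accessors are also instantiated with the unprivileged "ldrbt"/"strbt" forms, which have no post-indexed addressing mode in Thumb-2. As a minimal, hypothetical illustration (read_byte_inc is not a kernel function), the same ARM()/THUMB() split can be applied to a plain byte load:

/*
 * Hypothetical sketch, not kernel code: read a byte and advance the
 * pointer using the same ARM()/THUMB() split as the accessors above.
 * Assumes the selection macros sketched earlier (or <asm/unified.h>).
 */
#ifdef CONFIG_THUMB2_KERNEL
#define ARM(x...)
#define THUMB(x...)    x
#else
#define ARM(x...)      x
#define THUMB(x...)
#endif

static inline unsigned char read_byte_inc(const unsigned char **p)
{
	unsigned char v;
	const unsigned char *a = *p;

	__asm__(
	ARM(	"ldrb	%0, [%1], #1\n"	)	/* post-indexed byte load */
	THUMB(	"ldrb	%0, [%1]\n"	)	/* load ...               */
	THUMB(	"add	%1, %1, #1\n"	)	/* ... then advance       */
	: "=r" (v), "+r" (a)
	: : "memory");

	*p = a;
	return v;
}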
diff --git a/arch/arm/mm/cache-v7.S b/arch/arm/mm/cache-v7.S
index be93ff02a98d..bda0ec31a4e2 100644
--- a/arch/arm/mm/cache-v7.S
+++ b/arch/arm/mm/cache-v7.S
@@ -21,7 +21,7 @@
  *
  *	Flush the whole D-cache.
  *
- *	Corrupted registers: r0-r5, r7, r9-r11
+ *	Corrupted registers: r0-r7, r9-r11 (r6 only in Thumb mode)
  *
  *	- mm	- mm_struct describing address space
  */
@@ -51,8 +51,12 @@ loop1:
 loop2:
 	mov	r9, r4			@ create working copy of max way size
 loop3:
-	orr	r11, r10, r9, lsl r5	@ factor way and cache number into r11
-	orr	r11, r11, r7, lsl r2	@ factor index number into r11
+ ARM(	orr	r11, r10, r9, lsl r5	)	@ factor way and cache number into r11
+ THUMB(	lsl	r6, r9, r5		)
+ THUMB(	orr	r11, r10, r6		)	@ factor way and cache number into r11
+ ARM(	orr	r11, r11, r7, lsl r2	)	@ factor index number into r11
+ THUMB(	lsl	r6, r7, r2		)
+ THUMB(	orr	r11, r11, r6		)	@ factor index number into r11
 	mcr	p15, 0, r11, c7, c14, 2	@ clean & invalidate by set/way
 	subs	r9, r9, #1		@ decrement the way
 	bge	loop3
@@ -82,11 +86,13 @@ ENDPROC(v7_flush_dcache_all)
  *
  */
 ENTRY(v7_flush_kern_cache_all)
-	stmfd	sp!, {r4-r5, r7, r9-r11, lr}
+ ARM(	stmfd	sp!, {r4-r5, r7, r9-r11, lr}	)
+ THUMB(	stmfd	sp!, {r4-r7, r9-r11, lr}	)
 	bl	v7_flush_dcache_all
 	mov	r0, #0
 	mcr	p15, 0, r0, c7, c5, 0	@ I+BTB cache invalidate
-	ldmfd	sp!, {r4-r5, r7, r9-r11, lr}
+ ARM(	ldmfd	sp!, {r4-r5, r7, r9-r11, lr}	)
+ THUMB(	ldmfd	sp!, {r4-r7, r9-r11, lr}	)
 	mov	pc, lr
 ENDPROC(v7_flush_kern_cache_all)
 
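In the loop3 hunk above, the Thumb-2 path cannot use orr with a register-specified shift amount (Thumb-2 only allows constant shifts in its flexible second operand), so the shift is done first with a separate lsl into r6; that extra scratch register is why the corrupted-register comment and the stmfd/ldmfd lists in v7_flush_kern_cache_all change as well. The value being assembled is the set/way operand for the clean & invalidate operation (DCCISW). A hedged C sketch of what both paths compute, with illustrative parameter names:

/*
 * Hedged sketch of the value loop3 builds in r11 for
 * "mcr p15, 0, r11, c7, c14, 2" (clean & invalidate by set/way).
 * Names are illustrative: way_shift and set_shift correspond to the
 * shift amounts held in r5 and r2, level_field to r10 (cache level
 * already placed in bits [3:1]).
 */
#include <stdint.h>

static inline uint32_t dccisw_operand(uint32_t level_field, uint32_t way,
				      uint32_t set, uint32_t way_shift,
				      uint32_t set_shift)
{
	/* ARM does this in two orr-with-shifted-register instructions;
	 * Thumb-2 shifts into a scratch register (r6) first because its
	 * flexible operand only accepts constant shift amounts. */
	return level_field | (way << way_shift) | (set << set_shift);
}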
diff --git a/arch/arm/mm/proc-v7.S b/arch/arm/mm/proc-v7.S
index 180a08d03a03..c19aecdb2021 100644
--- a/arch/arm/mm/proc-v7.S
+++ b/arch/arm/mm/proc-v7.S
@@ -127,7 +127,9 @@ ENDPROC(cpu_v7_switch_mm)
  */
 ENTRY(cpu_v7_set_pte_ext)
 #ifdef CONFIG_MMU
-	str	r1, [r0], #-2048	@ linux version
+ ARM(	str	r1, [r0], #-2048	)	@ linux version
+ THUMB(	str	r1, [r0]	)		@ linux version
+ THUMB(	sub	r0, r0, #2048	)
 
 	bic	r3, r1, #0x000003f0
 	bic	r3, r3, #PTE_TYPE_MASK
@@ -273,6 +275,7 @@ __v7_setup:
 	mrc	p15, 0, r0, c1, c0, 0	@ read control register
 	bic	r0, r0, r5		@ clear bits them
 	orr	r0, r0, r6		@ set them
+ THUMB(	orr	r0, r0, #1 << 30	)	@ Thumb exceptions
 	mov	pc, lr			@ return to head.S:__ret
 ENDPROC(__v7_setup)
 
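Two Thumb-2 limitations drive the proc-v7.S changes: a post-indexed store in Thumb-2 only encodes an 8-bit immediate offset, so "str r1, [r0], #-2048" becomes a store followed by an explicit sub; and a Thumb-2 kernel needs exceptions to be taken in Thumb state, which the added __v7_setup line arranges by setting the TE bit (bit 30) of the control register. A hedged C sketch of that last step (function and macro names here are illustrative, not kernel API):

/*
 * Sketch of the control-register update at the end of __v7_setup.
 * SCTLR.TE is bit 30 per the ARMv7-A architecture; setting it makes
 * exceptions enter in Thumb state, as a Thumb-2 kernel requires.
 */
#include <stdint.h>

#define SCTLR_TE	(UINT32_C(1) << 30)	/* Thumb exception enable */

static inline uint32_t v7_setup_sctlr(uint32_t sctlr, uint32_t clear_mask,
				      uint32_t set_mask)
{
	sctlr &= ~clear_mask;		/* bic r0, r0, r5 */
	sctlr |= set_mask;		/* orr r0, r0, r6 */
#ifdef CONFIG_THUMB2_KERNEL
	sctlr |= SCTLR_TE;		/* THUMB( orr r0, r0, #1 << 30 ) */
#endif
	return sctlr;			/* caller writes this back to SCTLR */
}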