author     Greg Ungerer <gerg@uclinux.org>  2010-11-03 23:53:26 -0400
committer  Greg Ungerer <gerg@uclinux.org>  2011-01-05 00:19:18 -0500
commit     1c83af5f9d7e15a091f11394ad5916a7dcf1a99e (patch)
tree       aa41743fb552319bb53959a7df228233d4f04ba2 /arch
parent     0762346034a3e94f9c3a5fe8d7c4bcaffbc1cd53 (diff)
m68knommu: use user stack pointer hardware on some ColdFire cores
The more modern ColdFire parts (even if based on older version cores)
have separate user and supervisor stack pointers (a7 register).
Modify the ColdFire CPU setup and exception code to enable and use
this on parts that have it.
Signed-off-by: Greg Ungerer <gerg@uclinux.org>
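On parts with the hardware user stack pointer, the patch reads and writes USP with the privileged move-from/to-USP instructions, emitted as literal opcode words pinned to register %a0 (see the processor.h and entry_no.h hunks below), presumably because the assembler in use did not accept the %usp operand for ColdFire targets. A minimal sketch of that technique, assuming a GCC m68k/ColdFire kernel build; the helper names hw_rdusp()/hw_wrusp() are illustrative and not part of the patch:

/*
 * Sketch only: mirrors the rdusp()/wrusp() approach in this patch.
 * ".word 0x4e68" encodes "move %usp,%a0" and ".word 0x4e60" encodes
 * "move %a0,%usp"; binding the C variable to %a0 with a register asm
 * tells the compiler where the hard-coded opcode expects its operand.
 * These are privileged instructions, so they only work in supervisor
 * mode on a core with the separate user a7 enabled (CACR[EUSP]).
 */
static inline unsigned long hw_rdusp(void)		/* illustrative name */
{
	register unsigned long usp __asm__("a0");

	__asm__ __volatile__(".word 0x4e68" : "=a" (usp));	/* move %usp,%a0 */
	return usp;
}

static inline void hw_wrusp(unsigned long usp)		/* illustrative name */
{
	register unsigned long a0 __asm__("a0") = usp;

	__asm__ __volatile__(".word 0x4e60" : : "a" (a0));	/* move %a0,%usp */
}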
Diffstat (limited to 'arch')
-rw-r--r--  arch/m68k/include/asm/cacheflush_no.h      4
-rw-r--r--  arch/m68k/include/asm/entry_no.h          59
-rw-r--r--  arch/m68k/include/asm/m54xxacr.h           5
-rw-r--r--  arch/m68k/include/asm/mcfcache.h           6
-rw-r--r--  arch/m68k/include/asm/processor.h         13
-rw-r--r--  arch/m68knommu/Kconfig                    10
-rw-r--r--  arch/m68knommu/platform/coldfire/entry.S  30
7 files changed, 72 insertions, 55 deletions
diff --git a/arch/m68k/include/asm/cacheflush_no.h b/arch/m68k/include/asm/cacheflush_no.h
index 9246392e1372..e295923020d3 100644
--- a/arch/m68k/include/asm/cacheflush_no.h
+++ b/arch/m68k/include/asm/cacheflush_no.h
@@ -37,7 +37,7 @@ static inline void __flush_cache_all(void)
 {
 #if defined(CONFIG_M523x) || defined(CONFIG_M527x)
 	__asm__ __volatile__ (
-		"movel #0x81400100, %%d0\n\t"
+		"movel #0x81400110, %%d0\n\t"
 		"movec %%d0, %%CACR\n\t"
 		"nop\n\t"
 		: : : "d0" );
@@ -65,7 +65,7 @@ static inline void __flush_cache_all(void)
 #endif /* CONFIG_M5249 */
 #ifdef CONFIG_M532x
 	__asm__ __volatile__ (
-		"movel #0x81000200, %%d0\n\t"
+		"movel #0x81000210, %%d0\n\t"
 		"movec %%d0, %%CACR\n\t"
 		"nop\n\t"
 		: : : "d0" );
diff --git a/arch/m68k/include/asm/entry_no.h b/arch/m68k/include/asm/entry_no.h
index 26be277394f9..627d69bacc58 100644
--- a/arch/m68k/include/asm/entry_no.h
+++ b/arch/m68k/include/asm/entry_no.h
@@ -42,12 +42,16 @@
  */

 #ifdef CONFIG_COLDFIRE
+#ifdef CONFIG_COLDFIRE_SW_A7
 /*
- * This is made a little more tricky on the ColdFire. There is no
- * separate kernel and user stack pointers. Need to artificially
+ * This is made a little more tricky on older ColdFires. There is no
+ * separate supervisor and user stack pointers. Need to artificially
  * construct a usp in software... When doing this we need to disable
- * interrupts, otherwise bad things could happen.
+ * interrupts, otherwise bad things will happen.
  */
+.globl sw_usp
+.globl sw_ksp
+
 .macro SAVE_ALL
 	move	#0x2700,%sr		/* disable intrs */
 	btst	#5,%sp@(2)		/* from user? */
@@ -74,9 +78,7 @@
 7:
 .endm

-.macro RESTORE_ALL
-	btst	#5,%sp@(PT_SR)		/* going user? */
-	bnes	8f			/* no, skip */
+.macro RESTORE_USER
 	move	#0x2700,%sr		/* disable intrs */
 	movel	sw_usp,%a0		/* get usp */
 	movel	%sp@(PT_OFF_PC),%a0@-	/* copy exception program counter */
@@ -91,19 +93,22 @@
 	subql	#8,sw_usp		/* set exception */
 	movel	sw_usp,%sp		/* restore usp */
 	rte
-8:
-	moveml	%sp@,%d1-%d5/%a0-%a2
-	lea	%sp@(32),%sp		/* space for 8 regs */
-	movel	%sp@+,%d0
-	addql	#4,%sp			/* orig d0 */
-	addl	%sp@+,%sp		/* stkadj */
-	rte
 .endm

+.macro RDUSP
+	movel	sw_usp,%a2
+.endm
+
+.macro WRUSP
+	movel	%a0,sw_usp
+.endm
+
+#else /* !CONFIG_COLDFIRE_SW_A7 */
 /*
- * Quick exception save, use current stack only.
+ * Modern ColdFire parts have separate supervisor and user stack
+ * pointers. Simple load and restore macros for this case.
  */
-.macro SAVE_LOCAL
+.macro SAVE_ALL
 	move	#0x2700,%sr		/* disable intrs */
 	clrl	%sp@-			/* stkadj */
 	movel	%d0,%sp@-		/* orig d0 */
@@ -112,7 +117,7 @@
 	moveml	%d1-%d5/%a0-%a2,%sp@
 .endm

-.macro RESTORE_LOCAL
+.macro RESTORE_USER
 	moveml	%sp@,%d1-%d5/%a0-%a2
 	lea	%sp@(32),%sp		/* space for 8 regs */
 	movel	%sp@+,%d0
@@ -121,6 +126,18 @@
 	rte
 .endm

+.macro RDUSP
+	/*move	%usp,%a2*/
+	.word	0x4e6a
+.endm
+
+.macro WRUSP
+	/*move	%a0,%usp*/
+	.word	0x4e60
+.endm
+
+#endif /* !CONFIG_COLDFIRE_SW_A7 */
+
 .macro SAVE_SWITCH_STACK
 	lea	%sp@(-24),%sp		/* 6 regs */
 	moveml	%a3-%a6/%d6-%d7,%sp@
@@ -131,14 +148,6 @@
 	lea	%sp@(24),%sp		/* 6 regs */
 .endm

-/*
- * Software copy of the user and kernel stack pointers... Ugh...
- * Need these to get around ColdFire not having separate kernel
- * and user stack pointers.
- */
-.globl sw_usp
-.globl sw_ksp
-
 #else /* !CONFIG_COLDFIRE */

 /*
@@ -167,6 +176,6 @@
 	moveml	%sp@+,%a3-%a6/%d6-%d7
 .endm

-#endif /* !CONFIG_COLDFIRE */
+#endif /* !COLDFIRE_SW_A7 */
 #endif /* __ASSEMBLY__ */
 #endif /* __M68KNOMMU_ENTRY_H */
diff --git a/arch/m68k/include/asm/m54xxacr.h b/arch/m68k/include/asm/m54xxacr.h
index 76d64906aa62..12209c68b904 100644
--- a/arch/m68k/include/asm/m54xxacr.h
+++ b/arch/m68k/include/asm/m54xxacr.h
@@ -26,6 +26,7 @@
 #define CACR_IHLCK	0x00000800	/* Intruction cache half lock */
 #define CACR_IDCM	0x00000400	/* Intruction cache inhibit */
 #define CACR_ICINVA	0x00000100	/* Invalidate instr cache */
+#define CACR_EUSP	0x00000020	/* Enable separate user a7 */

 #define ACR_BASE_POS	24		/* Address Base */
 #define ACR_MASK_POS	16		/* Address Mask */
@@ -67,7 +68,11 @@
 /* Enable data store buffer */
 /* outside ACRs : No cache, precise */
 /* Enable instruction+branch caches */
+#if defined(CONFIG_M5407)
 #define CACHE_MODE (CACR_DEC+CACR_DESB+CACR_DDCM_P+CACR_BEC+CACR_IEC)
+#else
+#define CACHE_MODE (CACR_DEC+CACR_DESB+CACR_DDCM_P+CACR_BEC+CACR_IEC+CACR_EUSP)
+#endif

 #define DATA_CACHE_MODE (ACR_ENABLE+ACR_ANY+ACR_CM_WT)

diff --git a/arch/m68k/include/asm/mcfcache.h b/arch/m68k/include/asm/mcfcache.h
index 1b66018461b8..437686b62fea 100644
--- a/arch/m68k/include/asm/mcfcache.h
+++ b/arch/m68k/include/asm/mcfcache.h
@@ -46,7 +46,7 @@
 	movec	%d0,%ACR0
 	movel	#0x00000000,%d0		/* no other regions cached */
 	movec	%d0,%ACR1
-	movel	#0x80400100,%d0		/* configure cache */
+	movel	#0x80400110,%d0		/* configure cache */
 	movec	%d0,%CACR		/* enable cache */
 	nop
 .endm
@@ -101,7 +101,7 @@
 	movec	%d0,%ACR0
 	movel	#0x00000000,%d0		/* no other regions cached */
 	movec	%d0,%ACR1
-	movel	#0x80000200,%d0		/* setup cache mask */
+	movel	#0x80000210,%d0		/* setup cache mask */
 	movec	%d0,%CACR		/* enable cache */
 	nop
 .endm
@@ -142,7 +142,7 @@
 	movec	%d0,%ACR0
 	move.l	#0x00000000,%d0		/* no other regions cached */
 	movec	%d0,%ACR1
-	move.l	#0x80400000,%d0		/* enable 8K instruction cache */
+	movec	%d0,%CACR
 	nop
 .endm
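The same one-bit pattern runs through all of the V2/V3 cache-setup words changed above and in cacheflush_no.h: each CACR value gains bit 4 (0x...00 becomes 0x...10). A tiny sketch of that decomposition follows; the name V2V3_CACR_EUSP and its placement at bit 4 are assumptions inferred from the constants in this commit (only the V4e flag CACR_EUSP = 0x20 is named in m54xxacr.h here), not taken from the kernel headers:

#include <stdio.h>

/* Assumption: on V2/V3 ColdFire cores the "enable separate user a7"
 * control sits at bit 4 of CACR; this commit only names the V4e
 * variant (CACR_EUSP == 0x20). */
#define V2V3_CACR_EUSP	0x00000010u

int main(void)
{
	/* Old and new words from the first cache-setup macro changed above. */
	unsigned int old_cacr = 0x80400100u;
	unsigned int new_cacr = 0x80400110u;

	/* The only difference between the two words is the assumed EUSP bit. */
	printf("delta = %#x, matches assumed EUSP bit: %s\n",
	       old_cacr ^ new_cacr,
	       ((old_cacr ^ new_cacr) == V2V3_CACR_EUSP) ? "yes" : "no");
	return 0;
}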
diff --git a/arch/m68k/include/asm/processor.h b/arch/m68k/include/asm/processor.h
index 7a6a7590cc02..278c69bad57a 100644
--- a/arch/m68k/include/asm/processor.h
+++ b/arch/m68k/include/asm/processor.h
@@ -20,23 +20,26 @@

 static inline unsigned long rdusp(void)
 {
-#ifdef CONFIG_COLDFIRE
+#ifdef CONFIG_COLDFIRE_SW_A7
 	extern unsigned int sw_usp;
 	return sw_usp;
 #else
-	unsigned long usp;
-	__asm__ __volatile__("move %/usp,%0" : "=a" (usp));
+	register unsigned long usp __asm__("a0");
+	/* move %usp,%a0 */
+	__asm__ __volatile__(".word 0x4e68" : "=a" (usp));
 	return usp;
 #endif
 }

 static inline void wrusp(unsigned long usp)
 {
-#ifdef CONFIG_COLDFIRE
+#ifdef CONFIG_COLDFIRE_SW_A7
 	extern unsigned int sw_usp;
 	sw_usp = usp;
 #else
-	__asm__ __volatile__("move %0,%/usp" : : "a" (usp));
+	register unsigned long a0 __asm__("a0") = usp;
+	/* move %a0,%usp */
+	__asm__ __volatile__(".word 0x4e60" : : "a" (a0) );
 #endif
 }

diff --git a/arch/m68knommu/Kconfig b/arch/m68knommu/Kconfig
index 266a39eebcad..e6f482d18d05 100644
--- a/arch/m68knommu/Kconfig
+++ b/arch/m68knommu/Kconfig
@@ -75,6 +75,10 @@ config GENERIC_CLOCKEVENTS
 config NO_IOPORT
 	def_bool y

+config COLDFIRE_SW_A7
+	bool
+	default n
+
 source "init/Kconfig"

 source "kernel/Kconfig.freezer"
@@ -107,11 +111,13 @@ config M68360

 config M5206
 	bool "MCF5206"
+	select COLDFIRE_SW_A7
 	help
 	  Motorola ColdFire 5206 processor support.

 config M5206e
 	bool "MCF5206e"
+	select COLDFIRE_SW_A7
 	help
 	  Motorola ColdFire 5206e processor support.

@@ -129,6 +135,7 @@ config M523x

 config M5249
 	bool "MCF5249"
+	select COLDFIRE_SW_A7
 	help
 	  Motorola ColdFire 5249 processor support.

@@ -139,6 +146,7 @@ config M5271

 config M5272
 	bool "MCF5272"
+	select COLDFIRE_SW_A7
 	help
 	  Motorola ColdFire 5272 processor support.

@@ -155,6 +163,7 @@ config M528x

 config M5307
 	bool "MCF5307"
+	select COLDFIRE_SW_A7
 	help
 	  Motorola ColdFire 5307 processor support.

@@ -165,6 +174,7 @@ config M532x

 config M5407
 	bool "MCF5407"
+	select COLDFIRE_SW_A7
 	help
 	  Motorola ColdFire 5407 processor support.

diff --git a/arch/m68knommu/platform/coldfire/entry.S b/arch/m68knommu/platform/coldfire/entry.S
index b9ce31966181..f90e6173ccd4 100644
--- a/arch/m68knommu/platform/coldfire/entry.S
+++ b/arch/m68knommu/platform/coldfire/entry.S
@@ -36,13 +36,16 @@
 #include <asm/asm-offsets.h>
 #include <asm/entry.h>

+#ifdef CONFIG_COLDFIRE_SW_A7
+/*
+ * Define software copies of the supervisor and user stack pointers.
+ */
 .bss
-
 sw_ksp:
 	.long	0
-
 sw_usp:
 	.long	0
+#endif /* CONFIG_COLDFIRE_SW_A7 */

 .text

@@ -52,6 +55,7 @@ sw_usp:
 .globl ret_from_signal
 .globl sys_call_table
 .globl inthandler
+.globl fasthandler

 enosys:
 	mov.l	#sys_ni_syscall,%d3
@@ -138,20 +142,7 @@ Luser_return:
 	jne	Lwork_to_do		/* still work to do */

Lreturn:
-	move	#0x2700,%sr		/* disable intrs */
-	movel	sw_usp,%a0		/* get usp */
-	movel	%sp@(PT_OFF_PC),%a0@-	/* copy exception program counter */
-	movel	%sp@(PT_OFF_FORMATVEC),%a0@- /* copy exception format/vector/sr */
-	moveml	%sp@,%d1-%d5/%a0-%a2
-	lea	%sp@(32),%sp		/* space for 8 regs */
-	movel	%sp@+,%d0
-	addql	#4,%sp			/* orig d0 */
-	addl	%sp@+,%sp		/* stk adj */
-	addql	#8,%sp			/* remove exception */
-	movel	%sp,sw_ksp		/* save ksp */
-	subql	#8,sw_usp		/* set exception */
-	movel	sw_usp,%sp		/* restore usp */
-	rte
+	RESTORE_USER

Lwork_to_do:
 	movel	%a0@(TI_FLAGS),%d1	/* get thread_info->flags */
@@ -201,9 +192,8 @@ ENTRY(inthandler)
  */
ENTRY(resume)
 	movel	%a0, %d1		/* get prev thread in d1 */
-
-	movel	sw_usp,%d0		/* save usp */
-	movel	%d0,%a0@(TASK_THREAD+THREAD_USP)
+	RDUSP
+	movel	%a2,%a0@(TASK_THREAD+THREAD_USP)

 	SAVE_SWITCH_STACK
 	movel	%sp,%a0@(TASK_THREAD+THREAD_KSP) /* save kernel stack pointer */
@@ -211,5 +201,5 @@ ENTRY(resume)
 	RESTORE_SWITCH_STACK

 	movel	%a1@(TASK_THREAD+THREAD_USP),%a0 /* restore thread user stack */
-	movel	%a0, sw_usp
+	WRUSP
 	rts