Diffstat (limited to 'arch/m68k')
 arch/m68k/include/asm/cacheflush_no.h |  4 ++--
 arch/m68k/include/asm/entry_no.h      | 59 +++++++++++++++++----------
 arch/m68k/include/asm/m54xxacr.h      |  5 +++++
 arch/m68k/include/asm/mcfcache.h      |  6 +++---
 arch/m68k/include/asm/processor.h     | 13 ++++++++-----
 5 files changed, 52 insertions(+), 35 deletions(-)
diff --git a/arch/m68k/include/asm/cacheflush_no.h b/arch/m68k/include/asm/cacheflush_no.h
index 9246392e1372..e295923020d3 100644
--- a/arch/m68k/include/asm/cacheflush_no.h
+++ b/arch/m68k/include/asm/cacheflush_no.h
@@ -37,7 +37,7 @@ static inline void __flush_cache_all(void)
 {
 #if defined(CONFIG_M523x) || defined(CONFIG_M527x)
 	__asm__ __volatile__ (
-		"movel	#0x81400100, %%d0\n\t"
+		"movel	#0x81400110, %%d0\n\t"
 		"movec	%%d0, %%CACR\n\t"
 		"nop\n\t"
 		: : : "d0" );
@@ -65,7 +65,7 @@ static inline void __flush_cache_all(void)
 #endif /* CONFIG_M5249 */
 #ifdef CONFIG_M532x
 	__asm__ __volatile__ (
-		"movel	#0x81000200, %%d0\n\t"
+		"movel	#0x81000210, %%d0\n\t"
 		"movec	%%d0, %%CACR\n\t"
 		"nop\n\t"
 		: : : "d0" );
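
Each CACR image above changes only in bit 4 (0x10). On these V2/V3
ColdFire cores that bit corresponds to the EUSP (enable separate user
a7) flag this series turns on, mirroring the CACR_EUSP definition added
for the m54xx below. A standalone sanity check, illustrative only and
not part of the patch:

	#include <assert.h>

	int main(void)
	{
		/* M523x/M527x: old vs new CACR image */
		assert((0x81400110 ^ 0x81400100) == 0x10);
		/* M532x: old vs new CACR image */
		assert((0x81000210 ^ 0x81000200) == 0x10);
		return 0;
	}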
diff --git a/arch/m68k/include/asm/entry_no.h b/arch/m68k/include/asm/entry_no.h
index 26be277394f9..627d69bacc58 100644
--- a/arch/m68k/include/asm/entry_no.h
+++ b/arch/m68k/include/asm/entry_no.h
@@ -42,12 +42,16 @@
  */
 
 #ifdef CONFIG_COLDFIRE
+#ifdef CONFIG_COLDFIRE_SW_A7
 /*
- * This is made a little more tricky on the ColdFire. There is no
- * separate kernel and user stack pointers. Need to artificially
+ * This is made a little more tricky on older ColdFires. There is no
+ * separate supervisor and user stack pointers. Need to artificially
  * construct a usp in software... When doing this we need to disable
- * interrupts, otherwise bad things could happen.
+ * interrupts, otherwise bad things will happen.
  */
+.globl sw_usp
+.globl sw_ksp
+
 .macro SAVE_ALL
 	move	#0x2700,%sr		/* disable intrs */
 	btst	#5,%sp@(2)		/* from user? */
@@ -74,9 +78,7 @@
 7:
 .endm
 
-.macro RESTORE_ALL
-	btst	#5,%sp@(PT_SR)		/* going user? */
-	bnes	8f			/* no, skip */
+.macro RESTORE_USER
 	move	#0x2700,%sr		/* disable intrs */
 	movel	sw_usp,%a0		/* get usp */
 	movel	%sp@(PT_OFF_PC),%a0@-	/* copy exception program counter */
@@ -91,19 +93,22 @@
 	subql	#8,sw_usp		/* set exception */
 	movel	sw_usp,%sp		/* restore usp */
 	rte
-8:
-	moveml	%sp@,%d1-%d5/%a0-%a2
-	lea	%sp@(32),%sp		/* space for 8 regs */
-	movel	%sp@+,%d0
-	addql	#4,%sp			/* orig d0 */
-	addl	%sp@+,%sp		/* stkadj */
-	rte
 .endm
 
+.macro RDUSP
+	movel	sw_usp,%a2
+.endm
+
+.macro WRUSP
+	movel	%a0,sw_usp
+.endm
+
+#else /* !CONFIG_COLDFIRE_SW_A7 */
 /*
- * Quick exception save, use current stack only.
+ * Modern ColdFire parts have separate supervisor and user stack
+ * pointers. Simple load and restore macros for this case.
  */
-.macro SAVE_LOCAL
+.macro SAVE_ALL
 	move	#0x2700,%sr		/* disable intrs */
 	clrl	%sp@-			/* stkadj */
 	movel	%d0,%sp@-		/* orig d0 */
@@ -112,7 +117,7 @@
 	moveml	%d1-%d5/%a0-%a2,%sp@
 .endm
 
-.macro RESTORE_LOCAL
+.macro RESTORE_USER
 	moveml	%sp@,%d1-%d5/%a0-%a2
 	lea	%sp@(32),%sp		/* space for 8 regs */
 	movel	%sp@+,%d0
@@ -121,6 +126,18 @@
 	rte
 .endm
 
+.macro RDUSP
+	/*move	%usp,%a2*/
+	.word	0x4e6a
+.endm
+
+.macro WRUSP
+	/*move	%a0,%usp*/
+	.word	0x4e60
+.endm
+
+#endif /* !CONFIG_COLDFIRE_SW_A7 */
+
 .macro SAVE_SWITCH_STACK
 	lea	%sp@(-24),%sp		/* 6 regs */
 	moveml	%a3-%a6/%d6-%d7,%sp@
@@ -131,14 +148,6 @@
 	lea	%sp@(24),%sp		/* 6 regs */
 .endm
 
-/*
- * Software copy of the user and kernel stack pointers... Ugh...
- * Need these to get around ColdFire not having separate kernel
- * and user stack pointers.
- */
-.globl sw_usp
-.globl sw_ksp
-
 #else /* !CONFIG_COLDFIRE */
 
 /*
@@ -167,6 +176,6 @@
 	moveml	%sp@+,%a3-%a6/%d6-%d7
 .endm
 
-#endif /* !CONFIG_COLDFIRE */
+#endif /* !COLDFIRE_SW_A7 */
 #endif /* __ASSEMBLY__ */
 #endif /* __M68KNOMMU_ENTRY_H */
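
In the hardware-a7 variants above, RDUSP/WRUSP emit raw opcodes via
.word, presumably so the file still assembles with toolchains that
reject the privileged MOVE USP forms for ColdFire targets. Assuming
the standard M68000 encodings (0x4e60|n for "move %a<n>,%usp" and
0x4e68|n for "move %usp,%a<n>"), the two words decode to exactly the
instructions named in the comments. A small illustrative check in C,
with hypothetical helper macros:

	#include <assert.h>

	/* Assumed M68000 MOVE USP encodings */
	#define MOVE_AN_TO_USP(n)	(0x4e60 | (n))	/* move %a<n>,%usp */
	#define MOVE_USP_TO_AN(n)	(0x4e68 | (n))	/* move %usp,%a<n> */

	int main(void)
	{
		assert(MOVE_USP_TO_AN(2) == 0x4e6a);	/* RDUSP: move %usp,%a2 */
		assert(MOVE_AN_TO_USP(0) == 0x4e60);	/* WRUSP: move %a0,%usp */
		return 0;
	}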
diff --git a/arch/m68k/include/asm/m54xxacr.h b/arch/m68k/include/asm/m54xxacr.h
index 76d64906aa62..12209c68b904 100644
--- a/arch/m68k/include/asm/m54xxacr.h
+++ b/arch/m68k/include/asm/m54xxacr.h
@@ -26,6 +26,7 @@
 #define CACR_IHLCK	0x00000800	/* Intruction cache half lock */
 #define CACR_IDCM	0x00000400	/* Intruction cache inhibit */
 #define CACR_ICINVA	0x00000100	/* Invalidate instr cache */
+#define CACR_EUSP	0x00000020	/* Enable separate user a7 */
 
 #define ACR_BASE_POS	24		/* Address Base */
 #define ACR_MASK_POS	16		/* Address Mask */
@@ -67,7 +68,11 @@
 					/* Enable data store buffer */
 					/* outside ACRs : No cache, precise */
 					/* Enable instruction+branch caches */
+#if defined(CONFIG_M5407)
 #define CACHE_MODE (CACR_DEC+CACR_DESB+CACR_DDCM_P+CACR_BEC+CACR_IEC)
+#else
+#define CACHE_MODE (CACR_DEC+CACR_DESB+CACR_DDCM_P+CACR_BEC+CACR_IEC+CACR_EUSP)
+#endif
 
 #define DATA_CACHE_MODE (ACR_ENABLE+ACR_ANY+ACR_CM_WT)
 
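
Here the V4e parts use bit 5 (0x20) for EUSP, while the V2/V3 hunks
elsewhere in this patch set bit 4 (0x10); the MCF5407 keeps the old
CACHE_MODE, implying it has no EUSP support. Illustrative check only,
not part of the patch:

	#include <assert.h>

	#define CACR_EUSP	0x00000020	/* as defined above */

	int main(void)
	{
		assert(CACR_EUSP == (1u << 5));	/* V4e EUSP is bit 5 */
		return 0;
	}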
diff --git a/arch/m68k/include/asm/mcfcache.h b/arch/m68k/include/asm/mcfcache.h
index 1b66018461b8..437686b62fea 100644
--- a/arch/m68k/include/asm/mcfcache.h
+++ b/arch/m68k/include/asm/mcfcache.h
@@ -46,7 +46,7 @@
 	movec	%d0,%ACR0
 	movel	#0x00000000,%d0		/* no other regions cached */
 	movec	%d0,%ACR1
-	movel	#0x80400100,%d0		/* configure cache */
+	movel	#0x80400110,%d0		/* configure cache */
 	movec	%d0,%CACR		/* enable cache */
 	nop
 .endm
@@ -101,7 +101,7 @@
 	movec	%d0,%ACR0
 	movel	#0x00000000,%d0		/* no other regions cached */
 	movec	%d0,%ACR1
-	movel	#0x80000200,%d0		/* setup cache mask */
+	movel	#0x80000210,%d0		/* setup cache mask */
 	movec	%d0,%CACR		/* enable cache */
 	nop
 .endm
@@ -142,7 +142,7 @@
 	movec	%d0,%ACR0
 	move.l	#0x00000000,%d0		/* no other regions cached */
 	movec	%d0,%ACR1
-	move.l	#0x80400000,%d0		/* enable 8K instruction cache */
+	move.l	#0x80400010,%d0		/* enable 8K instruction cache */
 	movec	%d0,%CACR
 	nop
 .endm
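
All three boot-time cache setups gain the same 0x10 (EUSP) bit in the
CACR image loaded during init. A hypothetical helper, not from the
patch, expressing the transformation applied to each constant:

	#include <assert.h>

	/* Set the V2/V3 EUSP bit in a CACR image (illustrative only). */
	static inline unsigned long cacr_enable_eusp(unsigned long cacr)
	{
		return cacr | 0x00000010;
	}

	int main(void)
	{
		assert(cacr_enable_eusp(0x80400100) == 0x80400110);
		assert(cacr_enable_eusp(0x80000200) == 0x80000210);
		assert(cacr_enable_eusp(0x80400000) == 0x80400010);
		return 0;
	}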
diff --git a/arch/m68k/include/asm/processor.h b/arch/m68k/include/asm/processor.h
index 7a6a7590cc02..278c69bad57a 100644
--- a/arch/m68k/include/asm/processor.h
+++ b/arch/m68k/include/asm/processor.h
@@ -20,23 +20,26 @@
 
 static inline unsigned long rdusp(void)
 {
-#ifdef CONFIG_COLDFIRE
+#ifdef CONFIG_COLDFIRE_SW_A7
 	extern unsigned int sw_usp;
 	return sw_usp;
 #else
-	unsigned long usp;
-	__asm__ __volatile__("move %/usp,%0" : "=a" (usp));
+	register unsigned long usp __asm__("a0");
+	/* move %usp,%a0 */
+	__asm__ __volatile__(".word 0x4e68" : "=a" (usp));
 	return usp;
 #endif
 }
 
 static inline void wrusp(unsigned long usp)
 {
-#ifdef CONFIG_COLDFIRE
+#ifdef CONFIG_COLDFIRE_SW_A7
 	extern unsigned int sw_usp;
 	sw_usp = usp;
 #else
-	__asm__ __volatile__("move %0,%/usp" : : "a" (usp));
+	register unsigned long a0 __asm__("a0") = usp;
+	/* move %a0,%usp */
+	__asm__ __volatile__(".word 0x4e60" : : "a" (a0) );
 #endif
 }
 
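
With both flavours hidden behind rdusp()/wrusp(), callers no longer
need CONFIG_COLDFIRE conditionals of their own. A hypothetical usage
sketch (get_sigframe() and the stand-in accessors are illustrative,
not part of this patch):

	/* Stand-ins for the software-a7 accessors above, so the
	 * sketch builds on its own. */
	static unsigned long sw_usp;
	static unsigned long rdusp(void) { return sw_usp; }
	static void wrusp(unsigned long usp) { sw_usp = usp; }

	/* Carve an aligned frame out of the user stack. */
	static unsigned long get_sigframe(unsigned long frame_size)
	{
		unsigned long usp = rdusp();	/* current user stack pointer */
		usp -= frame_size;		/* make room for the frame */
		usp &= ~3UL;			/* keep it longword aligned */
		wrusp(usp);			/* commit the new user sp */
		return usp;
	}

	int main(void)
	{
		wrusp(0x1000);
		return get_sigframe(64) == 0xfc0 ? 0 : 1;
	}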