aboutsummaryrefslogtreecommitdiffstats
path: root/arch/m68k
diff options
context:
space:
mode:
authorGreg Ungerer <gerg@uclinux.org>2010-11-08 22:35:55 -0500
committerGreg Ungerer <gerg@uclinux.org>2011-01-05 00:19:18 -0500
commit8ce877a8eb8293b5b2c07f259d694026b0f519e4 (patch)
treed478523ee8bec4ecc638c9f9a35816fc12aaa37f /arch/m68k
parent3d461401eb5e3a8c471e92500aebd6c115273fba (diff)
m68knommu: clean up ColdFire cache control code
The cache control code for the ColdFire CPUs is a big ugly mess of "#ifdef"ery liberally coated with bit constants. Clean it up. The cache controllers in the various ColdFire parts are actually quite similar. Just differing in some bit flags and options supported. Using the header defines now in place it is pretty easy to factor out the small differences and use common setup and flush/invalidate code. I have preserved the cache setups as they were in the old code (except where obviously wrong - like in the case of the 5249). Following from this it should be easy now to extend the possible setups used on the CACHE controllers that support split caching or copy-back or write through options. Signed-off-by: Greg Ungerer <gerg@uclinux.org>
Diffstat (limited to 'arch/m68k')
-rw-r--r--arch/m68k/include/asm/cacheflush_no.h40
-rw-r--r--arch/m68k/include/asm/m52xxacr.h27
-rw-r--r--arch/m68k/include/asm/m53xxacr.h18
-rw-r--r--arch/m68k/include/asm/m54xxacr.h11
-rw-r--r--arch/m68k/include/asm/mcfcache.h150
5 files changed, 59 insertions, 187 deletions
diff --git a/arch/m68k/include/asm/cacheflush_no.h b/arch/m68k/include/asm/cacheflush_no.h
index 52b11ac9a30..8ada4ffc98e 100644
--- a/arch/m68k/include/asm/cacheflush_no.h
+++ b/arch/m68k/include/asm/cacheflush_no.h
@@ -2,7 +2,7 @@
2#define _M68KNOMMU_CACHEFLUSH_H 2#define _M68KNOMMU_CACHEFLUSH_H
3 3
4/* 4/*
5 * (C) Copyright 2000-2004, Greg Ungerer <gerg@snapgear.com> 5 * (C) Copyright 2000-2010, Greg Ungerer <gerg@snapgear.com>
6 */ 6 */
7#include <linux/mm.h> 7#include <linux/mm.h>
8#include <asm/mcfsim.h> 8#include <asm/mcfsim.h>
@@ -10,7 +10,7 @@
10#define flush_cache_all() __flush_cache_all() 10#define flush_cache_all() __flush_cache_all()
11#define flush_cache_mm(mm) do { } while (0) 11#define flush_cache_mm(mm) do { } while (0)
12#define flush_cache_dup_mm(mm) do { } while (0) 12#define flush_cache_dup_mm(mm) do { } while (0)
13#define flush_cache_range(vma, start, end) __flush_cache_all() 13#define flush_cache_range(vma, start, end) do { } while (0)
14#define flush_cache_page(vma, vmaddr) do { } while (0) 14#define flush_cache_page(vma, vmaddr) do { } while (0)
15#ifndef flush_dcache_range 15#ifndef flush_dcache_range
16#define flush_dcache_range(start,len) __flush_cache_all() 16#define flush_dcache_range(start,len) __flush_cache_all()
@@ -33,41 +33,13 @@
33#ifndef __flush_cache_all 33#ifndef __flush_cache_all
34static inline void __flush_cache_all(void) 34static inline void __flush_cache_all(void)
35{ 35{
36#if defined(CONFIG_M523x) || defined(CONFIG_M527x) 36#ifdef CACHE_INVALIDATE
37 __asm__ __volatile__ ( 37 __asm__ __volatile__ (
38 "movel #0x81400110, %%d0\n\t" 38 "movel %0, %%d0\n\t"
39 "movec %%d0, %%CACR\n\t" 39 "movec %%d0, %%CACR\n\t"
40 "nop\n\t" 40 "nop\n\t"
41 : : : "d0" ); 41 : : "i" (CACHE_INVALIDATE) : "d0" );
42#endif /* CONFIG_M523x || CONFIG_M527x */ 42#endif
43#if defined(CONFIG_M528x)
44 __asm__ __volatile__ (
45 "movel #0x81000200, %%d0\n\t"
46 "movec %%d0, %%CACR\n\t"
47 "nop\n\t"
48 : : : "d0" );
49#endif /* CONFIG_M528x */
50#if defined(CONFIG_M5206) || defined(CONFIG_M5206e) || defined(CONFIG_M5272)
51 __asm__ __volatile__ (
52 "movel #0x81000100, %%d0\n\t"
53 "movec %%d0, %%CACR\n\t"
54 "nop\n\t"
55 : : : "d0" );
56#endif /* CONFIG_M5206 || CONFIG_M5206e || CONFIG_M5272 */
57#ifdef CONFIG_M5249
58 __asm__ __volatile__ (
59 "movel #0xa1000200, %%d0\n\t"
60 "movec %%d0, %%CACR\n\t"
61 "nop\n\t"
62 : : : "d0" );
63#endif /* CONFIG_M5249 */
64#ifdef CONFIG_M532x
65 __asm__ __volatile__ (
66 "movel #0x81000210, %%d0\n\t"
67 "movec %%d0, %%CACR\n\t"
68 "nop\n\t"
69 : : : "d0" );
70#endif /* CONFIG_M532x */
71} 43}
72#endif /* __flush_cache_all */ 44#endif /* __flush_cache_all */
73 45
diff --git a/arch/m68k/include/asm/m52xxacr.h b/arch/m68k/include/asm/m52xxacr.h
index 4c92d999ee0..52230b5e1e4 100644
--- a/arch/m68k/include/asm/m52xxacr.h
+++ b/arch/m68k/include/asm/m52xxacr.h
@@ -52,5 +52,32 @@
52#define ACR_BWE 0x00000020 /* Write buffer enabled */ 52#define ACR_BWE 0x00000020 /* Write buffer enabled */
53#define ACR_WPROTECT 0x00000004 /* Write protect region */ 53#define ACR_WPROTECT 0x00000004 /* Write protect region */
54 54
55/*
56 * Set the cache controller settings we will use. This code is set to
57 * only use the instruction cache, even on the controllers that support
58 * split cache. (This setup is trying to preserve the existing behavior
59 * for now, in the future I hope to actually use the split cache mode).
60 */
61#if defined(CONFIG_M5206) || defined(CONFIG_M5206e) || \
62 defined(CONFIG_M5249) || defined(CONFIG_M5272)
63#define CACHE_INIT (CACR_CINV)
64#define CACHE_MODE (CACR_CENB + CACR_DCM)
65#else
66#ifdef CONFIG_COLDFIRE_SW_A7
67#define CACHE_INIT (CACR_CINV + CACR_DISD)
68#define CACHE_MODE (CACR_CENB + CACR_DISD + CACR_DCM)
69#else
70#define CACHE_INIT (CACR_CINV + CACR_DISD + CACR_EUSP)
71#define CACHE_MODE (CACR_CENB + CACR_DISD + CACR_DCM + CACR_EUSP)
72#endif
73#endif
74
75#define CACHE_INVALIDATE (CACHE_MODE + CACR_CINV)
76
77#define ACR0_MODE ((CONFIG_RAMBASE & 0xff000000) + \
78 (0x000f0000) + \
79 (ACR_ENABLE + ACR_ANY + ACR_CENB + ACR_BWE))
80#define ACR1_MODE 0
81
55/****************************************************************************/ 82/****************************************************************************/
56#endif /* m52xxsim_h */ 83#endif /* m52xxsim_h */
diff --git a/arch/m68k/include/asm/m53xxacr.h b/arch/m68k/include/asm/m53xxacr.h
index 532fbb91185..74c81c9b177 100644
--- a/arch/m68k/include/asm/m53xxacr.h
+++ b/arch/m68k/include/asm/m53xxacr.h
@@ -48,5 +48,23 @@
48#define ACR_CM_IMPRE 0x00000060 /* Cache inhibited, imprecise */ 48#define ACR_CM_IMPRE 0x00000060 /* Cache inhibited, imprecise */
49#define ACR_WPROTECT 0x00000004 /* Write protect region */ 49#define ACR_WPROTECT 0x00000004 /* Write protect region */
50 50
51/*
52 * Set the cache controller settings we will use. This default in the
53 * CACR is cache inhibited, we use the ACR register to set caching
54 * enabled on the regions we want (eg RAM).
55 */
56#ifdef CONFIG_COLDFIRE_SW_A7
57#define CACHE_MODE (CACR_EC + CACR_ESB + CACR_DCM_PRE)
58#else
59#define CACHE_MODE (CACR_EC + CACR_ESB + CACR_DCM_PRE + CACR_EUSP)
60#endif
61
62#define CACHE_INIT CACR_CINVA
63
64#define ACR0_MODE ((CONFIG_RAMBASE & 0xff000000) + \
65 (0x000f0000) + \
66 (ACR_ENABLE + ACR_ANY + ACR_CM_CB))
67#define ACR1_MODE 0
68
51/****************************************************************************/ 69/****************************************************************************/
52#endif /* m53xxsim_h */ 70#endif /* m53xxsim_h */
diff --git a/arch/m68k/include/asm/m54xxacr.h b/arch/m68k/include/asm/m54xxacr.h
index 12209c68b90..3c81a7a34a8 100644
--- a/arch/m68k/include/asm/m54xxacr.h
+++ b/arch/m68k/include/asm/m54xxacr.h
@@ -73,11 +73,16 @@
73#else 73#else
74#define CACHE_MODE (CACR_DEC+CACR_DESB+CACR_DDCM_P+CACR_BEC+CACR_IEC+CACR_EUSP) 74#define CACHE_MODE (CACR_DEC+CACR_DESB+CACR_DDCM_P+CACR_BEC+CACR_IEC+CACR_EUSP)
75#endif 75#endif
76
77#define DATA_CACHE_MODE (ACR_ENABLE+ACR_ANY+ACR_CM_WT) 76#define DATA_CACHE_MODE (ACR_ENABLE+ACR_ANY+ACR_CM_WT)
78
79#define INSN_CACHE_MODE (ACR_ENABLE+ACR_ANY) 77#define INSN_CACHE_MODE (ACR_ENABLE+ACR_ANY)
80 78
79#define CACHE_INIT (CACR_DCINVA+CACR_BCINVA+CACR_ICINVA)
80#define CACHE_INVALIDATE (CACHE_MODE+CACR_DCINVA+CACR_BCINVA+CACR_ICINVA)
81#define ACR0_MODE (0x000f0000+DATA_CACHE_MODE)
82#define ACR1_MODE 0
83#define ACR2_MODE (0x000f0000+INSN_CACHE_MODE)
84#define ACR3_MODE 0
85
81#ifndef __ASSEMBLY__ 86#ifndef __ASSEMBLY__
82 87
83#if ((DATA_CACHE_MODE & ACR_CM) == ACR_CM_WT) 88#if ((DATA_CACHE_MODE & ACR_CM) == ACR_CM_WT)
@@ -112,7 +117,7 @@ static inline void __m54xx_flush_cache_all(void)
112 : "i" (CACHE_LINE_SIZE), 117 : "i" (CACHE_LINE_SIZE),
113 "i" (DCACHE_SIZE / CACHE_WAYS), 118 "i" (DCACHE_SIZE / CACHE_WAYS),
114 "i" (CACHE_WAYS), 119 "i" (CACHE_WAYS),
115 "i" (CACHE_MODE|CACR_DCINVA|CACR_BCINVA|CACR_ICINVA) 120 "i" (CACHE_INVALIDATE)
116 : "d0", "a0" ); 121 : "d0", "a0" );
117} 122}
118 123
diff --git a/arch/m68k/include/asm/mcfcache.h b/arch/m68k/include/asm/mcfcache.h
deleted file mode 100644
index 2b3a6cfaaac..00000000000
--- a/arch/m68k/include/asm/mcfcache.h
+++ /dev/null
@@ -1,150 +0,0 @@
1/****************************************************************************/
2
3/*
4 * mcfcache.h -- ColdFire CPU cache support code
5 *
6 * (C) Copyright 2004, Greg Ungerer <gerg@snapgear.com>
7 */
8
9/****************************************************************************/
10#ifndef __M68KNOMMU_MCFCACHE_H
11#define __M68KNOMMU_MCFCACHE_H
12/****************************************************************************/
13
14
15/*
16 * The different ColdFire families have different cache arrangements.
17 * Everything from a small instruction only cache, to configurable
18 * data and/or instruction cache, to unified instruction/data, to
19 * harvard style separate instruction and data caches.
20 */
21
22#if defined(CONFIG_M5206) || defined(CONFIG_M5206e) || defined(CONFIG_M5272)
23/*
24 * Simple version 2 core cache. These have instruction cache only,
25 * we just need to invalidate it and enable it.
26 */
27.macro CACHE_ENABLE
28 movel #0x01000000,%d0 /* invalidate cache cmd */
29 movec %d0,%CACR /* do invalidate cache */
30 movel #0x80000100,%d0 /* setup cache mask */
31 movec %d0,%CACR /* enable cache */
32.endm
33#endif /* CONFIG_M5206 || CONFIG_M5206e || CONFIG_M5272 */
34
35#if defined(CONFIG_M523x) || defined(CONFIG_M527x)
36/*
37 * New version 2 cores have a configurable split cache arrangement.
38 * For now I am just enabling instruction cache - but ultimately I
39 * think a split instruction/data cache would be better.
40 */
41.macro CACHE_ENABLE
42 movel #0x01400000,%d0
43 movec %d0,%CACR /* invalidate cache */
44 nop
45 movel #0x0000c000,%d0 /* set SDRAM cached only */
46 movec %d0,%ACR0
47 movel #0x00000000,%d0 /* no other regions cached */
48 movec %d0,%ACR1
49 movel #0x80400110,%d0 /* configure cache */
50 movec %d0,%CACR /* enable cache */
51 nop
52.endm
53#endif /* CONFIG_M523x || CONFIG_M527x */
54
55#if defined(CONFIG_M528x)
56.macro CACHE_ENABLE
57 nop
58 movel #0x01000000, %d0
59 movec %d0, %CACR /* Invalidate cache */
60 nop
61 movel #0x0000c020, %d0 /* Set SDRAM cached only */
62 movec %d0, %ACR0
63 movel #0x00000000, %d0 /* No other regions cached */
64 movec %d0, %ACR1
65 movel #0x80000200, %d0 /* Setup cache mask */
66 movec %d0, %CACR /* Enable cache */
67 nop
68.endm
69#endif /* CONFIG_M528x */
70
71#if defined(CONFIG_M5249) || defined(CONFIG_M5307)
72/*
73 * The version 3 core cache. Oddly enough the version 2 core 5249
74 * has the same SDRAM and cache setup as the version 3 cores.
75 * This is a single unified instruction/data cache.
76 */
77.macro CACHE_ENABLE
78 movel #0x01000000,%d0 /* invalidate whole cache */
79 movec %d0,%CACR
80 nop
81#if defined(DEBUGGER_COMPATIBLE_CACHE) || defined(CONFIG_SECUREEDGEMP3)
82 movel #0x0000c000,%d0 /* set SDRAM cached (write-thru) */
83#else
84 movel #0x0000c020,%d0 /* set SDRAM cached (copyback) */
85#endif
86 movec %d0,%ACR0
87 movel #0x00000000,%d0 /* no other regions cached */
88 movec %d0,%ACR1
89 movel #0xa0000200,%d0 /* enable cache */
90 movec %d0,%CACR
91 nop
92.endm
93#endif /* CONFIG_M5249 || CONFIG_M5307 */
94
95#if defined(CONFIG_M532x)
96.macro CACHE_ENABLE
97 movel #0x01000000,%d0 /* invalidate cache cmd */
98 movec %d0,%CACR /* do invalidate cache */
99 nop
100 movel #0x4001C000,%d0 /* set SDRAM cached (write-thru) */
101 movec %d0,%ACR0
102 movel #0x00000000,%d0 /* no other regions cached */
103 movec %d0,%ACR1
104 movel #0x80000210,%d0 /* setup cache mask */
105 movec %d0,%CACR /* enable cache */
106 nop
107.endm
108#endif /* CONFIG_M532x */
109
110#if defined(CONFIG_M5407) || defined(CONFIG_M54xx)
111
112.macro CACHE_ENABLE
113 /* invalidate whole cache */
114 movel #(CACR_DCINVA+CACR_BCINVA+CACR_ICINVA),%d0
115 movec %d0,%CACR
116 nop
117 /* addresses range for data cache : 0x00000000-0x0fffffff */
118 movel #(0x000f0000+DATA_CACHE_MODE),%d0 /* set SDRAM cached */
119 movec %d0, %ACR0
120 movel #0x00000000,%d0 /* no other regions cached */
121 movec %d0, %ACR1
122 /* addresses range for instruction cache : 0x00000000-0x0fffffff */
123 movel #(0x000f0000+INSN_CACHE_MODE),%d0 /* set SDRAM cached */
124 movec %d0, %ACR2
125 movel #0x00000000,%d0 /* no other regions cached */
126 movec %d0, %ACR3
127 /* enable caches */
128 movel #(CACHE_MODE),%d0
129 movec %d0,%CACR
130 nop
131.endm
132#endif /* CONFIG_M5407 || CONFIG_M54xx */
133
134#if defined(CONFIG_M520x)
135.macro CACHE_ENABLE
136 move.l #0x01000000,%d0 /* invalidate whole cache */
137 movec %d0,%CACR
138 nop
139 move.l #0x0000c000,%d0 /* set SDRAM cached (write-thru) */
140 movec %d0,%ACR0
141 move.l #0x00000000,%d0 /* no other regions cached */
142 movec %d0,%ACR1
143 move.l #0x80400010,%d0 /* enable 8K instruction cache */
144 movec %d0,%CACR
145 nop
146.endm
147#endif /* CONFIG_M520x */
148
149/****************************************************************************/
150#endif /* __M68KNOMMU_MCFCACHE_H */