aboutsummaryrefslogtreecommitdiffstats
path: root/arch/m68k/include
diff options
context:
space:
mode:
authorGreg Ungerer <gerg@uclinux.org>2010-11-10 00:22:19 -0500
committerGreg Ungerer <gerg@uclinux.org>2011-01-05 00:19:20 -0500
commit07ffee59a756e3d16295fa1e0a4849c7a2273a13 (patch)
tree07161a81e704f140d1cccd424d9f99df5490908b /arch/m68k/include
parent4a5bae416562a8224707a1ff30e83ddab1474fb3 (diff)
m68knommu: create optimal separate instruction and data cache for ColdFire
Create separate functions to deal with instruction and data cache flushing. This way we can optimize them for the vairous cache types and arrangements used across the ColdFire family. For example the unified caches in the version 3 cores means we don't need to flush the instruction cache. For the version 2 cores that do not do data cacheing (or where we choose instruction cache only) we don't need to do any data flushing. Signed-off-by: Greg Ungerer <gerg@uclinux.org>
Diffstat (limited to 'arch/m68k/include')
-rw-r--r--arch/m68k/include/asm/cacheflush_no.h38
-rw-r--r--arch/m68k/include/asm/m52xxacr.h31
-rw-r--r--arch/m68k/include/asm/m53xxacr.h27
-rw-r--r--arch/m68k/include/asm/m54xxacr.h7
4 files changed, 83 insertions, 20 deletions
diff --git a/arch/m68k/include/asm/cacheflush_no.h b/arch/m68k/include/asm/cacheflush_no.h
index f931e1829835..cb88aa96c4f1 100644
--- a/arch/m68k/include/asm/cacheflush_no.h
+++ b/arch/m68k/include/asm/cacheflush_no.h
@@ -12,14 +12,12 @@
12#define flush_cache_dup_mm(mm) do { } while (0) 12#define flush_cache_dup_mm(mm) do { } while (0)
13#define flush_cache_range(vma, start, end) do { } while (0) 13#define flush_cache_range(vma, start, end) do { } while (0)
14#define flush_cache_page(vma, vmaddr) do { } while (0) 14#define flush_cache_page(vma, vmaddr) do { } while (0)
15#ifndef flush_dcache_range 15#define flush_dcache_range(start, len) __flush_dcache_all()
16#define flush_dcache_range(start,len) __flush_cache_all()
17#endif
18#define ARCH_IMPLEMENTS_FLUSH_DCACHE_PAGE 0 16#define ARCH_IMPLEMENTS_FLUSH_DCACHE_PAGE 0
19#define flush_dcache_page(page) do { } while (0) 17#define flush_dcache_page(page) do { } while (0)
20#define flush_dcache_mmap_lock(mapping) do { } while (0) 18#define flush_dcache_mmap_lock(mapping) do { } while (0)
21#define flush_dcache_mmap_unlock(mapping) do { } while (0) 19#define flush_dcache_mmap_unlock(mapping) do { } while (0)
22#define flush_icache_range(start,len) __flush_cache_all() 20#define flush_icache_range(start, len) __flush_icache_all()
23#define flush_icache_page(vma,pg) do { } while (0) 21#define flush_icache_page(vma,pg) do { } while (0)
24#define flush_icache_user_range(vma,pg,adr,len) do { } while (0) 22#define flush_icache_user_range(vma,pg,adr,len) do { } while (0)
25#define flush_cache_vmap(start, end) do { } while (0) 23#define flush_cache_vmap(start, end) do { } while (0)
@@ -46,4 +44,36 @@ static inline void __flush_cache_all(void)
46#endif 44#endif
47} 45}
48 46
47/*
48 * Some ColdFire parts implement separate instruction and data caches,
49 * on those we should just flush the appropriate cache. If we don't need
50 * to do any specific flushing then this will be optimized away.
51 */
52static inline void __flush_icache_all(void)
53{
54#ifdef CACHE_INVALIDATEI
55 __asm__ __volatile__ (
56 "movel %0, %%d0\n\t"
57 "movec %%d0, %%CACR\n\t"
58 "nop\n\t"
59 : : "i" (CACHE_INVALIDATEI) : "d0" );
60#endif
61}
62
63static inline void __flush_dcache_all(void)
64{
65#ifdef CACHE_PUSH
66 mcf_cache_push();
67#endif
68#ifdef CACHE_INVALIDATED
69 __asm__ __volatile__ (
70 "movel %0, %%d0\n\t"
71 "movec %%d0, %%CACR\n\t"
72 "nop\n\t"
73 : : "i" (CACHE_INVALIDATED) : "d0" );
74#else
75 /* Flush the write buffer */
76 __asm__ __volatile__ ( "nop" );
77#endif
78}
49#endif /* _M68KNOMMU_CACHEFLUSH_H */ 79#endif /* _M68KNOMMU_CACHEFLUSH_H */
diff --git a/arch/m68k/include/asm/m52xxacr.h b/arch/m68k/include/asm/m52xxacr.h
index 701f680bced9..abc391a9ae8d 100644
--- a/arch/m68k/include/asm/m52xxacr.h
+++ b/arch/m68k/include/asm/m52xxacr.h
@@ -59,22 +59,31 @@
59 * that as on. 59 * that as on.
60 */ 60 */
61#if defined(CONFIG_CACHE_I) 61#if defined(CONFIG_CACHE_I)
62#define CACHE_TYPE CACR_DISD 62#define CACHE_TYPE (CACR_DISD + CACR_EUSP)
63#define CACHE_INVTYPEI 0
63#elif defined(CONFIG_CACHE_D) 64#elif defined(CONFIG_CACHE_D)
64#define CACHE_TYPE CACR_DISI 65#define CACHE_TYPE (CACR_DISI + CACR_EUSP)
66#define CACHE_INVTYPED 0
67#elif defined(CONFIG_CACHE_BOTH)
68#define CACHE_TYPE CACR_EUSP
69#define CACHE_INVTYPEI CACR_INVI
70#define CACHE_INVTYPED CACR_INVD
65#else 71#else
66#define CACHE_TYPE 72/* This is the instruction cache only devices (no split cache, no eusp) */
73#define CACHE_TYPE 0
74#define CACHE_INVTYPEI 0
67#endif 75#endif
68 76
69#if defined(CONFIG_HAVE_CACHE_SPLIT) 77#define CACHE_INIT (CACR_CINV + CACHE_TYPE)
70#define CACHE_INIT (CACR_CINV + CACHE_TYPE + CACR_EUSP) 78#define CACHE_MODE (CACR_CENB + CACHE_TYPE + CACR_DCM)
71#define CACHE_MODE (CACR_CENB + CACHE_TYPE + CACR_DCM + CACR_EUSP)
72#else
73#define CACHE_INIT (CACR_CINV)
74#define CACHE_MODE (CACR_CENB + CACR_DCM)
75#endif
76 79
77#define CACHE_INVALIDATE (CACHE_MODE + CACR_CINV) 80#define CACHE_INVALIDATE (CACHE_MODE + CACR_CINV)
81#if defined(CACHE_INVTYPEI)
82#define CACHE_INVALIDATEI (CACHE_MODE + CACR_CINV + CACHE_INVTYPEI)
83#endif
84#if defined(CACHE_INVTYPED)
85#define CACHE_INVALIDATED (CACHE_MODE + CACR_CINV + CACHE_INVTYPED)
86#endif
78 87
79#define ACR0_MODE ((CONFIG_RAMBASE & 0xff000000) + \ 88#define ACR0_MODE ((CONFIG_RAMBASE & 0xff000000) + \
80 (0x000f0000) + \ 89 (0x000f0000) + \
diff --git a/arch/m68k/include/asm/m53xxacr.h b/arch/m68k/include/asm/m53xxacr.h
index d8b8dd68368f..cd952b0a8bd3 100644
--- a/arch/m68k/include/asm/m53xxacr.h
+++ b/arch/m68k/include/asm/m53xxacr.h
@@ -49,12 +49,29 @@
49#define ACR_WPROTECT 0x00000004 /* Write protect region */ 49#define ACR_WPROTECT 0x00000004 /* Write protect region */
50 50
51/* 51/*
52 * Define the cache type and arrangement (needed for pushes).
53 */
54#if defined(CONFIG_M5307)
55#define CACHE_SIZE 0x2000 /* 8k of unified cache */
56#define ICACHE_SIZE CACHE_SIZE
57#define DCACHE_SIZE CACHE_SIZE
58#elif defined(CONFIG_M532x)
59#define CACHE_SIZE 0x4000 /* 32k of unified cache */
60#define ICACHE_SIZE CACHE_SIZE
61#define DCACHE_SIZE CACHE_SIZE
62#endif
63
64#define CACHE_LINE_SIZE 16 /* 16 byte line size */
65#define CACHE_WAYS 4 /* 4 ways - set associative */
66
67/*
52 * Set the cache controller settings we will use. This default in the 68 * Set the cache controller settings we will use. This default in the
53 * CACR is cache inhibited, we use the ACR register to set cacheing 69 * CACR is cache inhibited, we use the ACR register to set cacheing
54 * enabled on the regions we want (eg RAM). 70 * enabled on the regions we want (eg RAM).
55 */ 71 */
56#if defined(CONFIG_CACHE_COPYBACK) 72#if defined(CONFIG_CACHE_COPYBACK)
57#define CACHE_TYPE ACR_CM_CB 73#define CACHE_TYPE ACR_CM_CB
74#define CACHE_PUSH
58#else 75#else
59#define CACHE_TYPE ACR_CM_WT 76#define CACHE_TYPE ACR_CM_WT
60#endif 77#endif
@@ -65,7 +82,15 @@
65#define CACHE_MODE (CACR_EC + CACR_ESB + CACR_DCM_PRE + CACR_EUSP) 82#define CACHE_MODE (CACR_EC + CACR_ESB + CACR_DCM_PRE + CACR_EUSP)
66#endif 83#endif
67 84
68#define CACHE_INIT CACR_CINVA 85/*
86 * Unified cache means we will never need to flush for coherency of
87 * instruction fetch. We will need to flush to maintain memory/DMA
88 * coherency though in all cases. And for copyback caches we will need
89 * to push cached data as well.
90 */
91#define CACHE_INIT CACR_CINVA
92#define CACHE_INVALIDATE CACR_CINVA
93#define CACHE_INVALIDATED CACR_CINVA
69 94
70#define ACR0_MODE ((CONFIG_RAMBASE & 0xff000000) + \ 95#define ACR0_MODE ((CONFIG_RAMBASE & 0xff000000) + \
71 (0x000f0000) + \ 96 (0x000f0000) + \
diff --git a/arch/m68k/include/asm/m54xxacr.h b/arch/m68k/include/asm/m54xxacr.h
index 29d4713f796b..16a1835f9b2a 100644
--- a/arch/m68k/include/asm/m54xxacr.h
+++ b/arch/m68k/include/asm/m54xxacr.h
@@ -81,15 +81,14 @@
81#define INSN_CACHE_MODE (ACR_ENABLE+ACR_ANY) 81#define INSN_CACHE_MODE (ACR_ENABLE+ACR_ANY)
82 82
83#define CACHE_INIT (CACR_DCINVA+CACR_BCINVA+CACR_ICINVA) 83#define CACHE_INIT (CACR_DCINVA+CACR_BCINVA+CACR_ICINVA)
84#define CACHE_INVALIDATE (CACHE_MODE+CACR_DCINVA+CACR_BCINVA+CACR_ICINVA) 84#define CACHE_INVALIDATE (CACHE_MODE+CACR_DCINVA+CACR_BCINVA+CACR_ICINVA)
85#define CACHE_INVALIDATEI (CACHE_MODE+CACR_BCINVA+CACR_ICINVA)
86#define CACHE_INVALIDATED (CACHE_MODE+CACR_DCINVA)
85#define ACR0_MODE (0x000f0000+DATA_CACHE_MODE) 87#define ACR0_MODE (0x000f0000+DATA_CACHE_MODE)
86#define ACR1_MODE 0 88#define ACR1_MODE 0
87#define ACR2_MODE (0x000f0000+INSN_CACHE_MODE) 89#define ACR2_MODE (0x000f0000+INSN_CACHE_MODE)
88#define ACR3_MODE 0 90#define ACR3_MODE 0
89 91
90#if ((DATA_CACHE_MODE & ACR_CM) == ACR_CM_WT)
91#define flush_dcache_range(a, l) do { asm("nop"); } while (0)
92#endif
93#if ((DATA_CACHE_MODE & ACR_CM) == ACR_CM_CP) 92#if ((DATA_CACHE_MODE & ACR_CM) == ACR_CM_CP)
94/* Copyback cache mode must push dirty cache lines first */ 93/* Copyback cache mode must push dirty cache lines first */
95#define CACHE_PUSH 94#define CACHE_PUSH