Diffstat (limited to 'include/asm-mips/r4kcache.h')
 include/asm-mips/r4kcache.h | 72 ++++++++++++++++++++++++------------------------
 1 file changed, 36 insertions(+), 36 deletions(-)
diff --git a/include/asm-mips/r4kcache.h b/include/asm-mips/r4kcache.h
index 5bea49feec66..a5ea9d828aee 100644
--- a/include/asm-mips/r4kcache.h
+++ b/include/asm-mips/r4kcache.h
@@ -21,7 +21,7 @@
  *
  * - The MIPS32 and MIPS64 specs permit an implementation to directly derive
  *   the index bits from the virtual address. This breaks with tradition
- *   set by the R4000. To keep unpleassant surprises from happening we pick
+ *   set by the R4000. To keep unpleasant surprises from happening we pick
  *   an address in KSEG0 / CKSEG0.
  * - We need a properly sign extended address for 64-bit code. To get away
  *   without ifdefs we let the compiler do it by a type cast.
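A note on the sign extension the comment describes: casting through a signed
32-bit type is what lets one expression work both as KSEG0 on 32-bit kernels
and as CKSEG0 on 64-bit ones. A minimal standalone sketch, not part of the
patch:

	#include <stdio.h>

	int main(void)
	{
		/* The (int) cast makes the compiler sign-extend the KSEG0
		 * address on a 64-bit target: 0x80000000 becomes
		 * 0xffffffff80000000 (CKSEG0) with no #ifdef needed. */
		unsigned long addr = (unsigned long)(int)0x80000000u;

		printf("%lx\n", addr);	/* ffffffff80000000 when long is 64-bit */
		return 0;
	}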
@@ -30,11 +30,11 @@
 
 #define cache_op(op,addr)						\
 	__asm__ __volatile__(						\
+	"	.set	push					\n"	\
 	"	.set	noreorder				\n"	\
 	"	.set	mips3\n\t				\n"	\
 	"	cache	%0, %1					\n"	\
-	"	.set	mips0					\n"	\
-	"	.set	reorder"					\
+	"	.set	pop					\n"	\
 	:								\
 	: "i" (op), "m" (*(unsigned char *)(addr)))
 
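The substance of the change is visible in this hunk: ".set mips0" and
".set reorder" force the assembler back to hard-coded defaults, while
".set push" / ".set pop" save and restore whatever option state the
surrounding code was assembled with. A hedged illustration of why that
matters (standalone sketch, not from the patch; 0x15 is the
Hit_Writeback_Inv_D opcode on R4000-style caches):

	static inline void touch_line(void *addr)
	{
		/* If the caller sits inside a .set noreorder region,
		 * .set pop restores that state, where the old
		 * ".set reorder" would silently re-enable reordering
		 * behind the caller's back. */
		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	noreorder				\n"
		"	.set	mips3					\n"
		"	cache	%0, %1					\n"
		"	.set	pop					\n"
		:
		: "i" (0x15), "m" (*(unsigned char *)addr));
	}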
@@ -84,14 +84,14 @@ static inline void flush_scache_line(unsigned long addr)
 static inline void protected_flush_icache_line(unsigned long addr)
 {
 	__asm__ __volatile__(
-		".set noreorder\n\t"
-		".set mips3\n"
-		"1:\tcache %0,(%1)\n"
-		"2:\t.set mips0\n\t"
-		".set reorder\n\t"
-		".section\t__ex_table,\"a\"\n\t"
-		STR(PTR)"\t1b,2b\n\t"
-		".previous"
+		"	.set	push			\n"
+		"	.set	noreorder		\n"
+		"	.set	mips3			\n"
+		"1:	cache	%0, (%1)		\n"
+		"2:	.set	pop			\n"
+		"	.section __ex_table,\"a\"	\n"
+		"	"STR(PTR)" 1b, 2b		\n"
+		"	.previous"
 		:
 		: "i" (Hit_Invalidate_I), "r" (addr));
 }
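The "1:" / "2:" labels paired with the __ex_table entry are what makes this
flush "protected": if the cache op at label 1 faults (say the line's page is
unmapped), the exception handler finds the faulting PC in __ex_table and
resumes at label 2 instead of oopsing. A hedged usage sketch built on the
function above (the range helper is hypothetical; a 32-byte I-cache line is
assumed):

	static void flush_icache_range_protected(unsigned long start,
						 unsigned long end)
	{
		unsigned long addr;

		/* A fault on any individual line is swallowed by the
		 * __ex_table fixup; the loop simply continues. */
		for (addr = start & ~31UL; addr < end; addr += 32)
			protected_flush_icache_line(addr);
	}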
@@ -100,19 +100,19 @@ static inline void protected_flush_icache_line(unsigned long addr)
  * R10000 / R12000 hazard - these processors don't support the Hit_Writeback_D
  * cacheop so we use Hit_Writeback_Inv_D which is supported by all R4000-style
  * caches. We're talking about one cacheline unnecessarily getting invalidated
- * here so the penaltiy isn't overly hard.
+ * here so the penalty isn't overly hard.
  */
 static inline void protected_writeback_dcache_line(unsigned long addr)
 {
 	__asm__ __volatile__(
-		".set noreorder\n\t"
-		".set mips3\n"
-		"1:\tcache %0,(%1)\n"
-		"2:\t.set mips0\n\t"
-		".set reorder\n\t"
-		".section\t__ex_table,\"a\"\n\t"
-		STR(PTR)"\t1b,2b\n\t"
-		".previous"
+		"	.set	push			\n"
+		"	.set	noreorder		\n"
+		"	.set	mips3			\n"
+		"1:	cache	%0, (%1)		\n"
+		"2:	.set	pop			\n"
+		"	.section __ex_table,\"a\"	\n"
+		"	"STR(PTR)" 1b, 2b		\n"
+		"	.previous"
 		:
 		: "i" (Hit_Writeback_Inv_D), "r" (addr));
 }
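The comment above carries the reasoning: Hit_Writeback_D is missing on the
R10000/R12000, so Hit_Writeback_Inv_D is used everywhere and the only cost
is one line being invalidated that could have stayed valid. A hedged sketch
of a typical caller (helper name hypothetical; 32-byte D-cache lines
assumed):

	static void dma_writeback_range(unsigned long start, unsigned long len)
	{
		unsigned long addr, end = start + len;

		/* Push every dirty line backing the buffer out to memory
		 * before a device reads it via DMA; the per-line
		 * invalidation is the "penalty" the comment refers to. */
		for (addr = start & ~31UL; addr < end; addr += 32)
			protected_writeback_dcache_line(addr);
	}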
@@ -120,14 +120,14 @@ static inline void protected_writeback_dcache_line(unsigned long addr)
 static inline void protected_writeback_scache_line(unsigned long addr)
 {
 	__asm__ __volatile__(
-		".set noreorder\n\t"
-		".set mips3\n"
-		"1:\tcache %0,(%1)\n"
-		"2:\t.set mips0\n\t"
-		".set reorder\n\t"
-		".section\t__ex_table,\"a\"\n\t"
-		STR(PTR)"\t1b,2b\n\t"
-		".previous"
+		"	.set	push			\n"
+		"	.set	noreorder		\n"
+		"	.set	mips3			\n"
+		"1:	cache	%0, (%1)		\n"
+		"2:	.set	pop			\n"
+		"	.section __ex_table,\"a\"	\n"
+		"	"STR(PTR)" 1b, 2b		\n"
+		"	.previous"
 		:
 		: "i" (Hit_Writeback_Inv_SD), "r" (addr));
 }
@@ -142,6 +142,7 @@ static inline void invalidate_tcache_page(unsigned long addr)
 
 #define cache16_unroll32(base,op)					\
 	__asm__ __volatile__(						\
+	"	.set push					\n"	\
 	"	.set noreorder					\n"	\
 	"	.set mips3					\n"	\
 	"	cache %1, 0x000(%0); cache %1, 0x010(%0)	\n"	\
@@ -160,8 +161,7 @@ static inline void invalidate_tcache_page(unsigned long addr)
 	"	cache %1, 0x1a0(%0); cache %1, 0x1b0(%0)	\n"	\
 	"	cache %1, 0x1c0(%0); cache %1, 0x1d0(%0)	\n"	\
 	"	cache %1, 0x1e0(%0); cache %1, 0x1f0(%0)	\n"	\
-	"	.set mips0					\n"	\
-	"	.set reorder					\n"	\
+	"	.set pop					\n"	\
 	:								\
 	: "r" (base),							\
 		"i" (op));
@@ -285,6 +285,7 @@ static inline void blast_scache16_page_indexed(unsigned long page)
 
 #define cache32_unroll32(base,op)					\
 	__asm__ __volatile__(						\
+	"	.set push					\n"	\
 	"	.set noreorder					\n"	\
 	"	.set mips3					\n"	\
 	"	cache %1, 0x000(%0); cache %1, 0x020(%0)	\n"	\
@@ -303,8 +304,7 @@ static inline void blast_scache16_page_indexed(unsigned long page)
 	"	cache %1, 0x340(%0); cache %1, 0x360(%0)	\n"	\
 	"	cache %1, 0x380(%0); cache %1, 0x3a0(%0)	\n"	\
 	"	cache %1, 0x3c0(%0); cache %1, 0x3e0(%0)	\n"	\
-	"	.set mips0					\n"	\
-	"	.set reorder					\n"	\
+	"	.set pop					\n"	\
 	:								\
 	: "r" (base),							\
 		"i" (op));
@@ -428,6 +428,7 @@ static inline void blast_scache32_page_indexed(unsigned long page)
 
 #define cache64_unroll32(base,op)					\
 	__asm__ __volatile__(						\
+	"	.set push					\n"	\
 	"	.set noreorder					\n"	\
 	"	.set mips3					\n"	\
 	"	cache %1, 0x000(%0); cache %1, 0x040(%0)	\n"	\
@@ -446,8 +447,7 @@ static inline void blast_scache32_page_indexed(unsigned long page)
 	"	cache %1, 0x680(%0); cache %1, 0x6c0(%0)	\n"	\
 	"	cache %1, 0x700(%0); cache %1, 0x740(%0)	\n"	\
 	"	cache %1, 0x780(%0); cache %1, 0x7c0(%0)	\n"	\
-	"	.set mips0					\n"	\
-	"	.set reorder					\n"	\
+	"	.set pop					\n"	\
 	:								\
 	: "r" (base),							\
 		"i" (op));
@@ -532,6 +532,7 @@ static inline void blast_scache64_page_indexed(unsigned long page)
 
 #define cache128_unroll32(base,op)					\
 	__asm__ __volatile__(						\
+	"	.set push					\n"	\
 	"	.set noreorder					\n"	\
 	"	.set mips3					\n"	\
 	"	cache %1, 0x000(%0); cache %1, 0x080(%0)	\n"	\
@@ -550,8 +551,7 @@ static inline void blast_scache64_page_indexed(unsigned long page)
 	"	cache %1, 0xd00(%0); cache %1, 0xd80(%0)	\n"	\
 	"	cache %1, 0xe00(%0); cache %1, 0xe80(%0)	\n"	\
 	"	cache %1, 0xf00(%0); cache %1, 0xf80(%0)	\n"	\
-	"	.set mips0					\n"	\
-	"	.set reorder					\n"	\
+	"	.set pop					\n"	\
 	:								\
 	: "r" (base),							\
 		"i" (op));