path: root/arch/mips/lib/memset.S
author		Thomas Bogendoerfer <tsbogend@alpha.franken.de>	2007-11-25 05:47:56 -0500
committer	Ralf Baechle <ralf@linux-mips.org>	2008-01-29 05:14:58 -0500
commit		930bff882296c02ca81db108672ef4ca06c37db5 (patch)
tree		53288137d4f7cc02d8ca417edb2b25221c3007cd /arch/mips/lib/memset.S
parent		2064ba23e58daa929eec6f5e7a2abc24574a95b9 (diff)
[MIPS] IP28: added cache barrier to assembly routines
IP28 needs special treatment to avoid speculative accesses. gcc takes care of this for .c code, but for assembly code we need to do it manually. This is taken from Peter Fuerst's IP28 patches.

Signed-off-by: Thomas Bogendoerfer <tsbogend@alpha.franken.de>
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
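For context, R10KCBARRIER is intended to emit an R10000 cache-barrier instruction only on IP28 builds and to expand to nothing elsewhere, so other platforms see no extra instructions. Below is a minimal sketch of how such a macro could be defined; the header location (include/asm-mips/asm.h), the CONFIG_SGI_IP28 guard, and the Cache_Barrier cache-op name follow the kernel's usual conventions but are assumptions, not taken from this patch.

/* Sketch only -- header path, config symbol, and cache op name are assumed. */
#ifdef CONFIG_SGI_IP28
/* Inhibit speculative stores past this point (R10000 cache barrier). */
#define R10KCBARRIER(addr)	cache	Cache_Barrier, addr;
#else
#define R10KCBARRIER(addr)
#endif

With this arrangement, sprinkling R10KCBARRIER before the stores in memset.S costs nothing on non-IP28 kernels, since the preprocessor removes the macro entirely there.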
Diffstat (limited to 'arch/mips/lib/memset.S')
-rw-r--r--	arch/mips/lib/memset.S	5
1 file changed, 5 insertions(+), 0 deletions(-)
diff --git a/arch/mips/lib/memset.S b/arch/mips/lib/memset.S
index 3bf38422342f..c018a4721693 100644
--- a/arch/mips/lib/memset.S
+++ b/arch/mips/lib/memset.S
@@ -86,6 +86,7 @@ FEXPORT(__bzero)
 	.set		at
 #endif
 
+	R10KCBARRIER(0(ra))
 #ifdef __MIPSEB__
 	EX(LONG_S_L, a1, (a0), first_fixup)	/* make word/dword aligned */
 #endif
@@ -103,11 +104,13 @@ FEXPORT(__bzero)
 	PTR_ADDU	t1, a0			/* end address */
 	.set		reorder
 1:	PTR_ADDIU	a0, 64
+	R10KCBARRIER(0(ra))
 	f_fill64 a0, -64, a1, fwd_fixup
 	bne		t1, a0, 1b
 	.set		noreorder
 
 memset_partial:
+	R10KCBARRIER(0(ra))
 	PTR_LA		t1, 2f			/* where to start */
 #if LONGSIZE == 4
 	PTR_SUBU	t1, t0
@@ -129,6 +132,7 @@ memset_partial:
 
 	beqz		a2, 1f
 	PTR_ADDU	a0, a2			/* What's left */
+	R10KCBARRIER(0(ra))
 #ifdef __MIPSEB__
 	EX(LONG_S_R, a1, -1(a0), last_fixup)
 #endif
@@ -143,6 +147,7 @@ small_memset:
 	PTR_ADDU	t1, a0, a2
 
 1:	PTR_ADDIU	a0, 1			/* fill bytewise */
+	R10KCBARRIER(0(ra))
 	bne		t1, a0, 1b
 	sb		a1, -1(a0)
 