author    Markos Chandras <markos.chandras@imgtec.com>  2014-01-03 05:11:45 -0500
committer Ralf Baechle <ralf@linux-mips.org>            2014-03-26 18:09:15 -0400
commit    fd9720e96e856160f94907db06b707841cbafb0d (patch)
tree      4586a2d89749ee8a729a1a7f9b05bfac66a30879
parent    6d5155c2a618207c6154be2e172ba92676dd82ca (diff)
MIPS: lib: memset: Add EVA support for the __bzero function.

Build the __bzero function using the EVA load/store instructions when
operating in EVA mode. This function is only used when accessing user
memory, so there is no need to build two distinct symbols for user and
kernel operations.

Signed-off-by: Markos Chandras <markos.chandras@imgtec.com>
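In practice this means that when the bzero body is instantiated in EVA mode,
every store it performs uses the EVA form of the instruction, which is
translated through the user address mapping. A minimal sketch of the
difference, assuming a 32-bit kernel where LONG_S assembles to sw and swe is
its EVA counterpart (illustrative, not taken from the patch):

9:	sw	a1, 0(a0)	# legacy mode: plain kernel store
9:	swe	a1, 0(a0)	# EVA mode: store via the user mapping

Either way the EX() wrapper in the diff below records label 9 in __ex_table,
so a fault on a bad user pointer is fixed up identically in both modes.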
-rw-r--r--  arch/mips/lib/memset.S  27
1 file changed, 23 insertions(+), 4 deletions(-)
diff --git a/arch/mips/lib/memset.S b/arch/mips/lib/memset.S
index 05fac199cc0c..7b0e5462ca51 100644
--- a/arch/mips/lib/memset.S
+++ b/arch/mips/lib/memset.S
@@ -37,13 +37,24 @@
 #define LEGACY_MODE 1
 #define EVA_MODE    2
 
+/*
+ * No need to protect it with EVA #ifdefery. The generated block of code
+ * will never be assembled if EVA is not enabled.
+ */
+#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
+#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)
+
 #define EX(insn,reg,addr,handler)			\
-9:	insn	reg, addr;				\
+	.if \mode == LEGACY_MODE;			\
+9:		insn reg, addr;				\
+	.else;						\
+9:		___BUILD_EVA_INSN(insn, reg, addr);	\
+	.endif;						\
 	.section __ex_table,"a";			\
 	PTR	9b, handler;				\
 	.previous
 
-	.macro	f_fill64 dst, offset, val, fixup
+	.macro	f_fill64 dst, offset, val, fixup, mode
 	EX(LONG_S, \val, (\offset + 0 * STORSIZE)(\dst), \fixup)
 	EX(LONG_S, \val, (\offset + 1 * STORSIZE)(\dst), \fixup)
 	EX(LONG_S, \val, (\offset + 2 * STORSIZE)(\dst), \fixup)
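The two-level macro added above is the usual paste-after-expansion idiom:
LONG_S is itself a preprocessor macro, so pasting the EVA suffix directly
(insn##e) would produce the undefined token LONG_Se. Routing the argument
through ___BUILD_EVA_INSN forces LONG_S to expand first. An illustrative
walk-through, assuming a 32-bit kernel where LONG_S is defined as sw (not
part of the patch):

	___BUILD_EVA_INSN(LONG_S, reg, addr)
	  -> __EVAFY(sw, reg, addr)            /* LONG_S expands on the way in */
	  -> __BUILD_EVA_INSN(swe, reg, addr)  /* ##e pastes the EVA suffix */

__BUILD_EVA_INSN (defined elsewhere in this tree) then emits the named
instruction; swe is the EVA store-word, which goes through the user-segment
mapping.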
@@ -119,7 +130,7 @@
 	.set	reorder
 1:	PTR_ADDIU	a0, 64
 	R10KCBARRIER(0(ra))
-	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@
+	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
 	bne	t1, a0, 1b
 	.set	noreorder
 
@@ -144,7 +155,7 @@
 	.set	noreorder
 	.set	nomacro
 	/* ... but first do longs ... */
-	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@
+	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
 2:	.set	pop
 	andi	a2, STORMASK		/* At most one long to go */
 
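Both call sites sit inside the __BUILD_BZERO macro body, so \mode is fixed at
assembly time by the instantiation and the .if inside EX() costs nothing at
run time. A rough sketch of how one store resolves when the body is expanded
with EVA_MODE (32-bit kernel assumed; illustrative only):

	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
	  -> EX(LONG_S, v, off(a0), fix)	/* \mode == EVA_MODE */
	  -> 9: swe v, off(a0)			/* .else branch taken */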
@@ -225,5 +236,13 @@ LEAF(memset)
 #endif
 	or	a1, t1
 1:
+#ifndef CONFIG_EVA
 FEXPORT(__bzero)
+#endif
 	__BUILD_BZERO LEGACY_MODE
+
+#ifdef CONFIG_EVA
+LEAF(__bzero)
+	__BUILD_BZERO EVA_MODE
+END(__bzero)
+#endif
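The resulting entry points, roughly (sketch, not part of the patch):

	!CONFIG_EVA:	memset, __bzero	-> one shared body with kernel stores;
					   FEXPORT makes __bzero a second
					   global label on the same code
	CONFIG_EVA:	memset		-> legacy body, kernel stores
			__bzero		-> separate LEAF function built with
					   EVA stores

With CONFIG_EVA set, __bzero gets its own LEAF/END pair because it can no
longer share the kernel-mode body: the EVA store forms are only correct for
user addresses, which is the only thing __bzero is used on.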