diff options
author | Markos Chandras <markos.chandras@imgtec.com> | 2014-01-03 04:23:16 -0500 |
---|---|---|
committer | Ralf Baechle <ralf@linux-mips.org> | 2014-03-26 18:09:15 -0400 |
commit | 6d5155c2a618207c6154be2e172ba92676dd82ca (patch) | |
tree | 7264987b040d0f939c07ffaa6f3fae97de7b2b04 | |
parent | 8483b14aaa81e9567dd69977f22efc4c8536184f (diff) |
MIPS: lib: memset: Use macro to build the __bzero symbol
MIPS: lib: memset: Use macro to build the __bzero symbol
Build the __bzero symbol using a macro. In EVA mode we will
need to use similar code to do the userspace load operations so
it is better if we use a macro to avoid code duplication.
Signed-off-by: Markos Chandras <markos.chandras@imgtec.com>
-rw-r--r-- | arch/mips/lib/memset.S | 95 |
1 file changed, 60 insertions, 35 deletions
diff --git a/arch/mips/lib/memset.S b/arch/mips/lib/memset.S index d8579857a81d..05fac199cc0c 100644 --- a/arch/mips/lib/memset.S +++ b/arch/mips/lib/memset.S | |||
@@ -34,6 +34,9 @@ | |||
34 | #define FILLPTRG t0 | 34 | #define FILLPTRG t0 |
35 | #endif | 35 | #endif |
36 | 36 | ||
37 | #define LEGACY_MODE 1 | ||
38 | #define EVA_MODE 2 | ||
39 | |||
37 | #define EX(insn,reg,addr,handler) \ | 40 | #define EX(insn,reg,addr,handler) \ |
38 | 9: insn reg, addr; \ | 41 | 9: insn reg, addr; \ |
39 | .section __ex_table,"a"; \ | 42 | .section __ex_table,"a"; \ |
@@ -63,33 +66,23 @@ | |||
63 | #endif | 66 | #endif |
64 | .endm | 67 | .endm |
65 | 68 | ||
66 | /* | ||
67 | * memset(void *s, int c, size_t n) | ||
68 | * | ||
69 | * a0: start of area to clear | ||
70 | * a1: char to fill with | ||
71 | * a2: size of area to clear | ||
72 | */ | ||
73 | .set noreorder | 69 | .set noreorder |
74 | .align 5 | 70 | .align 5 |
75 | LEAF(memset) | ||
76 | beqz a1, 1f | ||
77 | move v0, a0 /* result */ | ||
78 | 71 | ||
79 | andi a1, 0xff /* spread fillword */ | 72 | /* |
80 | LONG_SLL t1, a1, 8 | 73 | * Macro to generate the __bzero{,_user} symbol |
81 | or a1, t1 | 74 | * Arguments: |
82 | LONG_SLL t1, a1, 16 | 75 | * mode: LEGACY_MODE or EVA_MODE |
83 | #if LONGSIZE == 8 | 76 | */ |
84 | or a1, t1 | 77 | .macro __BUILD_BZERO mode |
85 | LONG_SLL t1, a1, 32 | 78 | /* Initialize __memset if this is the first time we call this macro */ |
86 | #endif | 79 | .ifnotdef __memset |
87 | or a1, t1 | 80 | .set __memset, 1 |
88 | 1: | 81 | .hidden __memset /* Make sure it does not leak */ |
82 | .endif | ||
89 | 83 | ||
90 | FEXPORT(__bzero) | ||
91 | sltiu t0, a2, STORSIZE /* very small region? */ | 84 | sltiu t0, a2, STORSIZE /* very small region? */ |
92 | bnez t0, .Lsmall_memset | 85 | bnez t0, .Lsmall_memset\@ |
93 | andi t0, a0, STORMASK /* aligned? */ | 86 | andi t0, a0, STORMASK /* aligned? */ |
94 | 87 | ||
95 | #ifdef CONFIG_CPU_MICROMIPS | 88 | #ifdef CONFIG_CPU_MICROMIPS |
@@ -109,28 +102,28 @@ FEXPORT(__bzero) | |||
109 | 102 | ||
110 | R10KCBARRIER(0(ra)) | 103 | R10KCBARRIER(0(ra)) |
111 | #ifdef __MIPSEB__ | 104 | #ifdef __MIPSEB__ |
112 | EX(LONG_S_L, a1, (a0), .Lfirst_fixup) /* make word/dword aligned */ | 105 | EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */ |
113 | #endif | 106 | #endif |
114 | #ifdef __MIPSEL__ | 107 | #ifdef __MIPSEL__ |
115 | EX(LONG_S_R, a1, (a0), .Lfirst_fixup) /* make word/dword aligned */ | 108 | EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */ |
116 | #endif | 109 | #endif |
117 | PTR_SUBU a0, t0 /* long align ptr */ | 110 | PTR_SUBU a0, t0 /* long align ptr */ |
118 | PTR_ADDU a2, t0 /* correct size */ | 111 | PTR_ADDU a2, t0 /* correct size */ |
119 | 112 | ||
120 | 1: ori t1, a2, 0x3f /* # of full blocks */ | 113 | 1: ori t1, a2, 0x3f /* # of full blocks */ |
121 | xori t1, 0x3f | 114 | xori t1, 0x3f |
122 | beqz t1, .Lmemset_partial /* no block to fill */ | 115 | beqz t1, .Lmemset_partial\@ /* no block to fill */ |
123 | andi t0, a2, 0x40-STORSIZE | 116 | andi t0, a2, 0x40-STORSIZE |
124 | 117 | ||
125 | PTR_ADDU t1, a0 /* end address */ | 118 | PTR_ADDU t1, a0 /* end address */ |
126 | .set reorder | 119 | .set reorder |
127 | 1: PTR_ADDIU a0, 64 | 120 | 1: PTR_ADDIU a0, 64 |
128 | R10KCBARRIER(0(ra)) | 121 | R10KCBARRIER(0(ra)) |
129 | f_fill64 a0, -64, FILL64RG, .Lfwd_fixup | 122 | f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@ |
130 | bne t1, a0, 1b | 123 | bne t1, a0, 1b |
131 | .set noreorder | 124 | .set noreorder |
132 | 125 | ||
133 | .Lmemset_partial: | 126 | .Lmemset_partial\@: |
134 | R10KCBARRIER(0(ra)) | 127 | R10KCBARRIER(0(ra)) |
135 | PTR_LA t1, 2f /* where to start */ | 128 | PTR_LA t1, 2f /* where to start */ |
136 | #ifdef CONFIG_CPU_MICROMIPS | 129 | #ifdef CONFIG_CPU_MICROMIPS |
@@ -150,7 +143,8 @@ FEXPORT(__bzero) | |||
150 | .set push | 143 | .set push |
151 | .set noreorder | 144 | .set noreorder |
152 | .set nomacro | 145 | .set nomacro |
153 | f_fill64 a0, -64, FILL64RG, .Lpartial_fixup /* ... but first do longs ... */ | 146 | /* ... but first do longs ... */ |
147 | f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@ | ||
154 | 2: .set pop | 148 | 2: .set pop |
155 | andi a2, STORMASK /* At most one long to go */ | 149 | andi a2, STORMASK /* At most one long to go */ |
156 | 150 | ||
@@ -158,15 +152,15 @@ FEXPORT(__bzero) | |||
158 | PTR_ADDU a0, a2 /* What's left */ | 152 | PTR_ADDU a0, a2 /* What's left */ |
159 | R10KCBARRIER(0(ra)) | 153 | R10KCBARRIER(0(ra)) |
160 | #ifdef __MIPSEB__ | 154 | #ifdef __MIPSEB__ |
161 | EX(LONG_S_R, a1, -1(a0), .Llast_fixup) | 155 | EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@) |
162 | #endif | 156 | #endif |
163 | #ifdef __MIPSEL__ | 157 | #ifdef __MIPSEL__ |
164 | EX(LONG_S_L, a1, -1(a0), .Llast_fixup) | 158 | EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@) |
165 | #endif | 159 | #endif |
166 | 1: jr ra | 160 | 1: jr ra |
167 | move a2, zero | 161 | move a2, zero |
168 | 162 | ||
169 | .Lsmall_memset: | 163 | .Lsmall_memset\@: |
170 | beqz a2, 2f | 164 | beqz a2, 2f |
171 | PTR_ADDU t1, a0, a2 | 165 | PTR_ADDU t1, a0, a2 |
172 | 166 | ||
@@ -177,13 +171,17 @@ FEXPORT(__bzero) | |||
177 | 171 | ||
178 | 2: jr ra /* done */ | 172 | 2: jr ra /* done */ |
179 | move a2, zero | 173 | move a2, zero |
174 | .if __memset == 1 | ||
180 | END(memset) | 175 | END(memset) |
176 | .set __memset, 0 | ||
177 | .hidden __memset | ||
178 | .endif | ||
181 | 179 | ||
182 | .Lfirst_fixup: | 180 | .Lfirst_fixup\@: |
183 | jr ra | 181 | jr ra |
184 | nop | 182 | nop |
185 | 183 | ||
186 | .Lfwd_fixup: | 184 | .Lfwd_fixup\@: |
187 | PTR_L t0, TI_TASK($28) | 185 | PTR_L t0, TI_TASK($28) |
188 | andi a2, 0x3f | 186 | andi a2, 0x3f |
189 | LONG_L t0, THREAD_BUADDR(t0) | 187 | LONG_L t0, THREAD_BUADDR(t0) |
@@ -191,7 +189,7 @@ FEXPORT(__bzero) | |||
191 | jr ra | 189 | jr ra |
192 | LONG_SUBU a2, t0 | 190 | LONG_SUBU a2, t0 |
193 | 191 | ||
194 | .Lpartial_fixup: | 192 | .Lpartial_fixup\@: |
195 | PTR_L t0, TI_TASK($28) | 193 | PTR_L t0, TI_TASK($28) |
196 | andi a2, STORMASK | 194 | andi a2, STORMASK |
197 | LONG_L t0, THREAD_BUADDR(t0) | 195 | LONG_L t0, THREAD_BUADDR(t0) |
@@ -199,6 +197,33 @@ FEXPORT(__bzero) | |||
199 | jr ra | 197 | jr ra |
200 | LONG_SUBU a2, t0 | 198 | LONG_SUBU a2, t0 |
201 | 199 | ||
202 | .Llast_fixup: | 200 | .Llast_fixup\@: |
203 | jr ra | 201 | jr ra |
204 | andi v1, a2, STORMASK | 202 | andi v1, a2, STORMASK |
203 | |||
204 | .endm | ||
205 | |||
206 | /* | ||
207 | * memset(void *s, int c, size_t n) | ||
208 | * | ||
209 | * a0: start of area to clear | ||
210 | * a1: char to fill with | ||
211 | * a2: size of area to clear | ||
212 | */ | ||
213 | |||
214 | LEAF(memset) | ||
215 | beqz a1, 1f | ||
216 | move v0, a0 /* result */ | ||
217 | |||
218 | andi a1, 0xff /* spread fillword */ | ||
219 | LONG_SLL t1, a1, 8 | ||
220 | or a1, t1 | ||
221 | LONG_SLL t1, a1, 16 | ||
222 | #if LONGSIZE == 8 | ||
223 | or a1, t1 | ||
224 | LONG_SLL t1, a1, 32 | ||
225 | #endif | ||
226 | or a1, t1 | ||
227 | 1: | ||
228 | FEXPORT(__bzero) | ||
229 | __BUILD_BZERO LEGACY_MODE | ||