author	Linus Torvalds <torvalds@linux-foundation.org>	2016-03-14 21:43:51 -0400
committer	Linus Torvalds <torvalds@linux-foundation.org>	2016-03-14 21:43:51 -0400
commit	d88bfe1d68735595d57bd071294f664c4f054435 (patch)
tree	10a12422117f364a18f3a7b629b10dfe6f99da1a /arch/x86/lib
parent	e71c2c1eeb8de7a083a728c5b7e0b83ed1faf047 (diff)
parent	eb1af3b71f9d83e45f2fd2fd649356e98e1c582c (diff)
Merge branch 'ras-core-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
Pull RAS updates from Ingo Molnar:
 "Various RAS updates:

   - AMD MCE support updates for future CPUs, fixes and 'SMCA'
     (Scalable MCA) error decoding support (Aravind Gopalakrishnan)

   - x86 memcpy_mcsafe() support, to enable smart(er) hardware error
     recovery in NVDIMM drivers, based on an extension of the x86
     exception handling code. (Tony Luck)"

* 'ras-core-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
  EDAC/sb_edac: Fix computation of channel address
  x86/mm, x86/mce: Add memcpy_mcsafe()
  x86/mce/AMD: Document some functionality
  x86/mce: Clarify comments regarding deferred error
  x86/mce/AMD: Fix logic to obtain block address
  x86/mce/AMD, EDAC: Enable error decoding of Scalable MCA errors
  x86/mce: Move MCx_CONFIG MSR definitions
  x86/mce: Check for faults tagged in EXTABLE_CLASS_FAULT exception table entries
  x86/mm: Expand the exception table logic to allow new handling options
  x86/mce/AMD: Set MCAX Enable bit
  x86/mce/AMD: Carve out threshold block preparation
  x86/mce/AMD: Fix LVT offset configuration for thresholding
  x86/mce/AMD: Reduce number of blocks scanned per bank
  x86/mce/AMD: Do not perform shared bank check for future processors
  x86/mce: Fix order of AMD MCE init function call
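As a hedged illustration of the caller side (not part of this merge): memcpy_mcsafe() returns zero when the copy completes and a non-zero value when a machine check was taken on one of the source reads, so an NVDIMM read path can turn an uncorrected memory error into an ordinary I/O error instead of a kernel panic. The wrapper name and the -EIO choice below are hypothetical; only memcpy_mcsafe() itself comes from this series.

#include <linux/errno.h>
#include <linux/string.h>	/* pulls in the x86_64 memcpy_mcsafe() declaration added by this series */

/* Hypothetical consumer: copy from persistent memory, report -EIO on a machine check. */
static int pmem_read_safe(void *dst, const void *pmem_src, size_t len)
{
	/* memcpy_mcsafe(): 0 on success, non-zero if a source read faulted */
	if (memcpy_mcsafe(dst, pmem_src, len))
		return -EIO;
	return 0;
}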
Diffstat (limited to 'arch/x86/lib')
-rw-r--r--  arch/x86/lib/memcpy_64.S  117
1 file changed, 117 insertions(+), 0 deletions(-)
diff --git a/arch/x86/lib/memcpy_64.S b/arch/x86/lib/memcpy_64.S
index 16698bba87de..7d37641ada5b 100644
--- a/arch/x86/lib/memcpy_64.S
+++ b/arch/x86/lib/memcpy_64.S
@@ -177,3 +177,120 @@ ENTRY(memcpy_orig)
 .Lend:
 	retq
 ENDPROC(memcpy_orig)
+
+#ifndef CONFIG_UML
+/*
+ * memcpy_mcsafe - memory copy with machine check exception handling
+ * Note that we only catch machine checks when reading the source addresses.
+ * Writes to target are posted and don't generate machine checks.
+ */
+ENTRY(memcpy_mcsafe)
+	cmpl $8, %edx
+	/* Less than 8 bytes? Go to byte copy loop */
+	jb .L_no_whole_words
+
+	/* Check for bad alignment of source */
+	testl $7, %esi
+	/* Already aligned */
+	jz .L_8byte_aligned
+
+	/* Copy one byte at a time until source is 8-byte aligned */
+	movl %esi, %ecx
+	andl $7, %ecx
+	subl $8, %ecx
+	negl %ecx
+	subl %ecx, %edx
+.L_copy_leading_bytes:
+	movb (%rsi), %al
+	movb %al, (%rdi)
+	incq %rsi
+	incq %rdi
+	decl %ecx
+	jnz .L_copy_leading_bytes
+
+.L_8byte_aligned:
+	/* Figure out how many whole cache lines (64-bytes) to copy */
+	movl %edx, %ecx
+	andl $63, %edx
+	shrl $6, %ecx
+	jz .L_no_whole_cache_lines
+
+	/* Loop copying whole cache lines */
+.L_cache_w0:	movq (%rsi), %r8
+.L_cache_w1:	movq 1*8(%rsi), %r9
+.L_cache_w2:	movq 2*8(%rsi), %r10
+.L_cache_w3:	movq 3*8(%rsi), %r11
+	movq %r8, (%rdi)
+	movq %r9, 1*8(%rdi)
+	movq %r10, 2*8(%rdi)
+	movq %r11, 3*8(%rdi)
+.L_cache_w4:	movq 4*8(%rsi), %r8
+.L_cache_w5:	movq 5*8(%rsi), %r9
+.L_cache_w6:	movq 6*8(%rsi), %r10
+.L_cache_w7:	movq 7*8(%rsi), %r11
+	movq %r8, 4*8(%rdi)
+	movq %r9, 5*8(%rdi)
+	movq %r10, 6*8(%rdi)
+	movq %r11, 7*8(%rdi)
+	leaq 64(%rsi), %rsi
+	leaq 64(%rdi), %rdi
+	decl %ecx
+	jnz .L_cache_w0
+
+	/* Are there any trailing 8-byte words? */
+.L_no_whole_cache_lines:
+	movl %edx, %ecx
+	andl $7, %edx
+	shrl $3, %ecx
+	jz .L_no_whole_words
+
+	/* Copy trailing words */
+.L_copy_trailing_words:
+	movq (%rsi), %r8
+	mov %r8, (%rdi)
+	leaq 8(%rsi), %rsi
+	leaq 8(%rdi), %rdi
+	decl %ecx
+	jnz .L_copy_trailing_words
+
+	/* Any trailing bytes? */
+.L_no_whole_words:
+	andl %edx, %edx
+	jz .L_done_memcpy_trap
+
+	/* Copy trailing bytes */
+	movl %edx, %ecx
+.L_copy_trailing_bytes:
+	movb (%rsi), %al
+	movb %al, (%rdi)
+	incq %rsi
+	incq %rdi
+	decl %ecx
+	jnz .L_copy_trailing_bytes
+
+	/* Copy successful. Return zero */
+.L_done_memcpy_trap:
+	xorq %rax, %rax
+	ret
+ENDPROC(memcpy_mcsafe)
+
+	.section .fixup, "ax"
+	/* Return non-zero for any failure */
+.L_memcpy_mcsafe_fail:
+	mov $1, %rax
+	ret
+
+	.previous
+
+	_ASM_EXTABLE_FAULT(.L_copy_leading_bytes, .L_memcpy_mcsafe_fail)
+	_ASM_EXTABLE_FAULT(.L_cache_w0, .L_memcpy_mcsafe_fail)
+	_ASM_EXTABLE_FAULT(.L_cache_w1, .L_memcpy_mcsafe_fail)
+	_ASM_EXTABLE_FAULT(.L_cache_w2, .L_memcpy_mcsafe_fail)
+	_ASM_EXTABLE_FAULT(.L_cache_w3, .L_memcpy_mcsafe_fail)
+	_ASM_EXTABLE_FAULT(.L_cache_w4, .L_memcpy_mcsafe_fail)
+	_ASM_EXTABLE_FAULT(.L_cache_w5, .L_memcpy_mcsafe_fail)
+	_ASM_EXTABLE_FAULT(.L_cache_w6, .L_memcpy_mcsafe_fail)
+	_ASM_EXTABLE_FAULT(.L_cache_w7, .L_memcpy_mcsafe_fail)
+	_ASM_EXTABLE_FAULT(.L_copy_trailing_words, .L_memcpy_mcsafe_fail)
+	_ASM_EXTABLE_FAULT(.L_copy_trailing_bytes, .L_memcpy_mcsafe_fail)
+#endif
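For readers who prefer C, here is a minimal reference model of the copy structure above (a sketch, not part of the commit): leading bytes until the source is 8-byte aligned, then whole 64-byte cache lines, then trailing 8-byte words, then trailing bytes. For example, a source address ending in 0x5 gives esi & 7 = 5, so 8 - 5 = 3 leading bytes are copied before the aligned loops start; that is what the andl/subl/negl sequence computes. The function name is hypothetical and, crucially, this model has none of the machine-check recovery: only the assembly version can tag each individual load in the exception table via _ASM_EXTABLE_FAULT().

#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Reference model only: mirrors the size/alignment bookkeeping, no MCE handling. */
static int memcpy_mcsafe_model(void *dst, const void *src, size_t cnt)
{
	const unsigned char *s = src;
	unsigned char *d = dst;
	size_t n;

	/* Fewer than 8 bytes: fall straight through to the final byte copy. */
	if (cnt >= 8 && ((uintptr_t)s & 7)) {
		/* Leading bytes: 8 - (src & 7), the negl trick in the assembly. */
		n = 8 - ((uintptr_t)s & 7);
		memcpy(d, s, n);
		d += n; s += n; cnt -= n;
	}

	n = (cnt >> 6) << 6;		/* whole 64-byte cache lines */
	memcpy(d, s, n);
	d += n; s += n; cnt &= 63;

	n = (cnt >> 3) << 3;		/* trailing 8-byte words */
	memcpy(d, s, n);
	d += n; s += n; cnt &= 7;

	memcpy(d, s, cnt);		/* trailing bytes */
	return 0;			/* assembly: 0 = success, non-zero = fault on a source read */
}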