diff options
Diffstat (limited to 'arch/x86_64/lib/csum-copy.S')
-rw-r--r-- | arch/x86_64/lib/csum-copy.S | 26 |
1 file changed, 21 insertions, 5 deletions
diff --git a/arch/x86_64/lib/csum-copy.S b/arch/x86_64/lib/csum-copy.S
index 72fd55ee896e..f0dba36578ea 100644
--- a/arch/x86_64/lib/csum-copy.S
+++ b/arch/x86_64/lib/csum-copy.S
@@ -5,8 +5,9 @@ | |||
5 | * License. See the file COPYING in the main directory of this archive | 5 | * License. See the file COPYING in the main directory of this archive |
6 | * for more details. No warranty for anything given at all. | 6 | * for more details. No warranty for anything given at all. |
7 | */ | 7 | */ |
8 | #include <linux/linkage.h> | 8 | #include <linux/linkage.h> |
9 | #include <asm/errno.h> | 9 | #include <asm/dwarf2.h> |
10 | #include <asm/errno.h> | ||
10 | 11 | ||
11 | /* | 12 | /* |
12 | * Checksum copy with exception handling. | 13 | * Checksum copy with exception handling. |
@@ -53,19 +54,24 @@ | |||
53 | .endm | 54 | .endm |
54 | 55 | ||
55 | 56 | ||
56 | .globl csum_partial_copy_generic | 57 | ENTRY(csum_partial_copy_generic) |
57 | .p2align 4 | 58 | CFI_STARTPROC |
58 | csum_partial_copy_generic: | ||
59 | cmpl $3*64,%edx | 59 | cmpl $3*64,%edx |
60 | jle .Lignore | 60 | jle .Lignore |
61 | 61 | ||
62 | .Lignore: | 62 | .Lignore: |
63 | subq $7*8,%rsp | 63 | subq $7*8,%rsp |
64 | CFI_ADJUST_CFA_OFFSET 7*8 | ||
64 | movq %rbx,2*8(%rsp) | 65 | movq %rbx,2*8(%rsp) |
66 | CFI_REL_OFFSET rbx, 2*8 | ||
65 | movq %r12,3*8(%rsp) | 67 | movq %r12,3*8(%rsp) |
68 | CFI_REL_OFFSET r12, 3*8 | ||
66 | movq %r14,4*8(%rsp) | 69 | movq %r14,4*8(%rsp) |
70 | CFI_REL_OFFSET r14, 4*8 | ||
67 | movq %r13,5*8(%rsp) | 71 | movq %r13,5*8(%rsp) |
72 | CFI_REL_OFFSET r13, 5*8 | ||
68 | movq %rbp,6*8(%rsp) | 73 | movq %rbp,6*8(%rsp) |
74 | CFI_REL_OFFSET rbp, 6*8 | ||
69 | 75 | ||
70 | movq %r8,(%rsp) | 76 | movq %r8,(%rsp) |
71 | movq %r9,1*8(%rsp) | 77 | movq %r9,1*8(%rsp) |
@@ -208,14 +214,22 @@ csum_partial_copy_generic: | |||
208 | addl %ebx,%eax | 214 | addl %ebx,%eax |
209 | adcl %r9d,%eax /* carry */ | 215 | adcl %r9d,%eax /* carry */ |
210 | 216 | ||
217 | CFI_REMEMBER_STATE | ||
211 | .Lende: | 218 | .Lende: |
212 | movq 2*8(%rsp),%rbx | 219 | movq 2*8(%rsp),%rbx |
220 | CFI_RESTORE rbx | ||
213 | movq 3*8(%rsp),%r12 | 221 | movq 3*8(%rsp),%r12 |
222 | CFI_RESTORE r12 | ||
214 | movq 4*8(%rsp),%r14 | 223 | movq 4*8(%rsp),%r14 |
224 | CFI_RESTORE r14 | ||
215 | movq 5*8(%rsp),%r13 | 225 | movq 5*8(%rsp),%r13 |
226 | CFI_RESTORE r13 | ||
216 | movq 6*8(%rsp),%rbp | 227 | movq 6*8(%rsp),%rbp |
228 | CFI_RESTORE rbp | ||
217 | addq $7*8,%rsp | 229 | addq $7*8,%rsp |
230 | CFI_ADJUST_CFA_OFFSET -7*8 | ||
218 | ret | 231 | ret |
232 | CFI_RESTORE_STATE | ||
219 | 233 | ||
220 | /* Exception handlers. Very simple, zeroing is done in the wrappers */ | 234 | /* Exception handlers. Very simple, zeroing is done in the wrappers */ |
221 | .Lbad_source: | 235 | .Lbad_source: |
@@ -231,3 +245,5 @@ csum_partial_copy_generic: | |||
231 | jz .Lende | 245 | jz .Lende |
232 | movl $-EFAULT,(%rax) | 246 | movl $-EFAULT,(%rax) |
233 | jmp .Lende | 247 | jmp .Lende |
248 | CFI_ENDPROC | ||
249 | ENDPROC(csum_partial_copy_generic) | ||