Diffstat (limited to 'arch/x86/kernel/entry_64.S')
-rw-r--r--	arch/x86/kernel/entry_64.S | 28
1 file changed, 10 insertions(+), 18 deletions(-)
diff --git a/arch/x86/kernel/entry_64.S b/arch/x86/kernel/entry_64.S
index b25ca969edd2..c844f0816ab8 100644
--- a/arch/x86/kernel/entry_64.S
+++ b/arch/x86/kernel/entry_64.S
@@ -830,27 +830,24 @@ restore_args:
 	RESTORE_ARGS 1,8,1
 
 irq_return:
+	INTERRUPT_RETURN
+
+ENTRY(native_iret)
 	/*
 	 * Are we returning to a stack segment from the LDT?  Note: in
 	 * 64-bit mode SS:RSP on the exception stack is always valid.
 	 */
 #ifdef CONFIG_X86_ESPFIX64
 	testb $4,(SS-RIP)(%rsp)
-	jnz irq_return_ldt
+	jnz native_irq_return_ldt
 #endif
 
-irq_return_iret:
-	INTERRUPT_RETURN
-	_ASM_EXTABLE(irq_return_iret, bad_iret)
-
-#ifdef CONFIG_PARAVIRT
-ENTRY(native_iret)
+native_irq_return_iret:
 	iretq
-	_ASM_EXTABLE(native_iret, bad_iret)
-#endif
+	_ASM_EXTABLE(native_irq_return_iret, bad_iret)
 
 #ifdef CONFIG_X86_ESPFIX64
-irq_return_ldt:
+native_irq_return_ldt:
 	pushq_cfi %rax
 	pushq_cfi %rdi
 	SWAPGS
@@ -872,7 +869,7 @@ irq_return_ldt:
 	SWAPGS
 	movq %rax,%rsp
 	popq_cfi %rax
-	jmp irq_return_iret
+	jmp native_irq_return_iret
 #endif
 
 .section .fixup,"ax"
@@ -956,13 +953,8 @@ __do_double_fault:
 	cmpl $__KERNEL_CS,CS(%rdi)
 	jne do_double_fault
 	movq RIP(%rdi),%rax
-	cmpq $irq_return_iret,%rax
-#ifdef CONFIG_PARAVIRT
-	je 1f
-	cmpq $native_iret,%rax
-#endif
+	cmpq $native_irq_return_iret,%rax
 	jne do_double_fault		/* This shouldn't happen... */
-1:
 	movq PER_CPU_VAR(kernel_stack),%rax
 	subq $(6*8-KERNEL_STACK_OFFSET),%rax	/* Reset to original stack */
 	movq %rax,RSP(%rdi)
@@ -1428,7 +1420,7 @@ error_sti:
 	 */
 error_kernelspace:
 	incl %ebx
-	leaq irq_return_iret(%rip),%rcx
+	leaq native_irq_return_iret(%rip),%rcx
 	cmpq %rcx,RIP+8(%rsp)
 	je error_swapgs
 	movl %ecx,%eax			/* zero extend */
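
For readability, here is the interrupt-return path as it reads after this patch, assembled verbatim from the new-side lines of the first hunk above. This is an excerpt from entry_64.S, not standalone code: ENTRY, INTERRUPT_RETURN, _ASM_EXTABLE, and the SS/RIP frame offsets are macros defined elsewhere in the kernel tree, and the short inline comments are added here as annotations.

irq_return:
	INTERRUPT_RETURN		/* paravirt-patchable return; on a native kernel this reaches native_iret */

ENTRY(native_iret)
	/*
	 * Are we returning to a stack segment from the LDT?  Note: in
	 * 64-bit mode SS:RSP on the exception stack is always valid.
	 */
#ifdef CONFIG_X86_ESPFIX64
	testb $4,(SS-RIP)(%rsp)		/* TI bit of the saved SS: stack segment came from the LDT */
	jnz native_irq_return_ldt	/* take the espfix64 path */
#endif

native_irq_return_iret:
	iretq
	_ASM_EXTABLE(native_irq_return_iret, bad_iret)	/* a faulting iretq is fixed up via bad_iret */

With the CONFIG_PARAVIRT special case removed, native_irq_return_iret is the single address that the __do_double_fault and error_kernelspace hunks above compare against.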