diff options
author | H. Peter Anvin <hpa@zytor.com> | 2012-06-21 13:25:03 -0400 |
---|---|---|
committer | H. Peter Anvin <hpa@zytor.com> | 2012-06-21 13:25:03 -0400 |
commit | 9751d7627582fc1cc64625d63bde9528c14f1544 (patch) | |
tree | 33b0b58eb47f4bda4478c2d658019a1d2986cad8 | |
parent | 2b1b712f050eaf0ac576591281446dc960c0afc5 (diff) |
x86-64, reboot: Be more paranoid in 64-bit reboot=bios
Be a bit more paranoid in the transition back to 16-bit mode. In
particular, in case the kernel is residing above the 4 GiB mark,
switch to the trampoline GDT, and make the jump after turning off
paging a far jump. In theory, none of this should matter, but it is
exactly the kind of thing that broken SMM or virtualization software
could trip up on.
Signed-off-by: H. Peter Anvin <hpa@zytor.com>
Link: http://lkml.kernel.org/r/tip-jopx7y6g6dbcx4tpal8q0jlr@git.kernel.org
-rw-r--r-- | arch/x86/realmode/rm/reboot.S | 8 |
1 file changed, 6 insertions, 2 deletions
diff --git a/arch/x86/realmode/rm/reboot.S b/arch/x86/realmode/rm/reboot.S index 6bf8feac5557..f932ea61d1c8 100644 --- a/arch/x86/realmode/rm/reboot.S +++ b/arch/x86/realmode/rm/reboot.S | |||
@@ -22,14 +22,18 @@ | |||
22 | ENTRY(machine_real_restart_asm) | 22 | ENTRY(machine_real_restart_asm) |
23 | 23 | ||
24 | #ifdef CONFIG_X86_64 | 24 | #ifdef CONFIG_X86_64 |
25 | /* Switch to trampoline GDT as it is guaranteed < 4 GiB */ | ||
26 | movl $__KERNEL_DS, %eax | ||
27 | movl %eax, %ds | ||
28 | lgdtl pa_tr_gdt | ||
25 | 29 | ||
26 | /* Disable paging to drop us out of long mode */ | 30 | /* Disable paging to drop us out of long mode */ |
27 | movl %cr0, %eax | 31 | movl %cr0, %eax |
28 | andl $~X86_CR0_PG, %eax | 32 | andl $~X86_CR0_PG, %eax |
29 | movl %eax, %cr0 | 33 | movl %eax, %cr0 |
30 | jmp 1f /* "A branch" may be needed here, assume near is OK */ | 34 | ljmpl $__KERNEL32_CS, $pa_machine_real_restart_paging_off |
31 | 35 | ||
32 | 1: | 36 | GLOBAL(machine_real_restart_paging_off) |
33 | xorl %eax, %eax | 37 | xorl %eax, %eax |
34 | xorl %edx, %edx | 38 | xorl %edx, %edx |
35 | movl $MSR_EFER, %ecx | 39 | movl $MSR_EFER, %ecx |