x86 setup: make PM transition more paranoid; cleanup 32-bit entry
Make the transition to protected mode more paranoid by having
back-to-back near jump (to synchronize the 386/486 prefetch queue) and
far jump (to set up the code segment).
While we're at it, zero as many registers as practical (for future
expandability of the 32-bit entry interface) and enter 32-bit mode
with a valid stack. Note that the 32-bit code cannot rely on this
stack, or we'll break all other existing users of the 32-bit
entrypoint, but it may make debugging hacks easier to write.
Signed-off-by: H. Peter Anvin <hpa@zytor.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
diff --git a/arch/x86/boot/pmjump.S b/arch/x86/boot/pmjump.S
index fa6bed1..ef0da1f 100644
--- a/arch/x86/boot/pmjump.S
+++ b/arch/x86/boot/pmjump.S
@@ -29,12 +29,13 @@
*/
protected_mode_jump:
movl %edx, %esi # Pointer to boot_params table
- movl %eax, 2f # Patch ljmpl instruction
+
+ xorl %ebx, %ebx
+ movw %cs, %bx
+ shll $4, %ebx
+ addl %ebx, 2f
movw $__BOOT_DS, %cx
- xorl %ebx, %ebx # Per the 32-bit boot protocol
- xorl %ebp, %ebp # Per the 32-bit boot protocol
- xorl %edi, %edi # Per the 32-bit boot protocol
movl %cr0, %edx
orb $1, %dl # Protected mode (PE) bit
@@ -42,15 +43,34 @@
jmp 1f # Short jump to serialize on 386/486
1:
- movw %cx, %ds
- movw %cx, %es
- movw %cx, %fs
- movw %cx, %gs
- movw %cx, %ss
-
- # Jump to the 32-bit entrypoint
+ # Transition to 32-bit mode
.byte 0x66, 0xea # ljmpl opcode
-2: .long 0 # offset
+2: .long in_pm32 # offset
.word __BOOT_CS # segment
.size protected_mode_jump, .-protected_mode_jump
+
+ .code32
+ .type in_pm32, @function
+in_pm32:
+ # Set up data segments for flat 32-bit mode
+ movl %ecx, %ds
+ movl %ecx, %es
+ movl %ecx, %fs
+ movl %ecx, %gs
+ movl %ecx, %ss
+ # The 32-bit code sets up its own stack, but this way we do have
+ # a valid stack if some debugging hack wants to use it.
+ addl %ebx, %esp
+
+ # Clear registers to allow for future extensions to the
+ # 32-bit boot protocol
+ xorl %ecx, %ecx
+ xorl %edx, %edx
+ xorl %ebx, %ebx
+ xorl %ebp, %ebp
+ xorl %edi, %edi
+
+ jmpl *%eax # Jump to the 32-bit entrypoint
+
+ .size in_pm32, .-in_pm32