@@ -52,6 +52,7 @@ struct trampoline_header {
u64 efer;
u32 cr4;
u32 flags;
+ u32 lock;
#endif
};
@@ -64,6 +65,8 @@ extern unsigned long initial_stack;
extern unsigned long initial_vc_handler;
#endif
+extern u32 *trampoline_lock;
+
extern unsigned char real_mode_blob[];
extern unsigned char real_mode_relocs[];
@@ -252,6 +252,16 @@ SYM_INNER_LABEL(secondary_startup_64_no_
movq TASK_threadsp(%rax), %rsp
/*
+ * Now that this CPU is running on its own stack, release the realmode
+ * trampoline lock. For the boot CPU the pointer is NULL!
+ */
+ movq trampoline_lock(%rip), %rax
+ testq %rax, %rax
+ jz .Lsetup_gdt
+ movl $0, (%rax)
+
+.Lsetup_gdt:
+ /*
* We must switch to a new descriptor in kernel space for the GDT
* because soon the kernel won't have access anymore to the userspace
* addresses where we're currently running on. We have to do that here
@@ -433,6 +443,8 @@ SYM_DATA(initial_code, .quad x86_64_star
#ifdef CONFIG_AMD_MEM_ENCRYPT
SYM_DATA(initial_vc_handler, .quad handle_vc_boot_ghcb)
#endif
+
+SYM_DATA(trampoline_lock, .quad 0);
__FINITDATA
__INIT
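
The two head_64.S hunks above are the release half of the protocol: once an AP has switched to its own task stack it clears the lock word that the 16-bit trampoline set (see the LOAD_REALMODE_ESP hunk further down), while the boot CPU skips the store because trampoline_lock is still the NULL-initialized .quad at that point. As a rough illustration only, here is a C sketch of that path using GCC atomic builtins; the names are hypothetical stand-ins, not kernel code, and the real unlock is simply the movl $0, (%rax) shown above (a plain store suffices on x86, where stores are not reordered with earlier loads or stores):

	#include <stdint.h>

	/* Hypothetical stand-in for the trampoline_lock symbol: it points at
	 * trampoline_header->lock once setup_real_mode() has run and is NULL
	 * before that, i.e. when the boot CPU passes through this path. */
	static uint32_t *trampoline_lock;

	/* Sketch of the release done in secondary_startup_64 after switching
	 * to the CPU's own stack: skip the boot CPU, otherwise clear the lock
	 * word so the next AP may enter the realmode trampoline. */
	static void trampoline_unlock(void)
	{
		if (!trampoline_lock)	/* boot CPU: nothing to release */
			return;
		__atomic_store_n(trampoline_lock, 0, __ATOMIC_RELEASE);
	}
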
@@ -154,6 +154,9 @@ static void __init setup_real_mode(void)
trampoline_header->flags = 0;
+ trampoline_lock = &trampoline_header->lock;
+ *trampoline_lock = 0;
+
trampoline_pgd = (u64 *) __va(real_mode_header->trampoline_pgd);
/* Map the real mode stub as virtual == physical */
@@ -37,6 +37,20 @@
.text
.code16
+.macro LOAD_REALMODE_ESP
+ /*
+ * Make sure only one CPU fiddles with the realmode stack
+ */
+.Llock_rm\@:
+ lock btsl $0, tr_lock
+ jnc 2f
+ pause
+ jmp .Llock_rm\@
+2:
+ # Setup stack
+ movl $rm_stack_end, %esp
+.endm
+
.balign PAGE_SIZE
SYM_CODE_START(trampoline_start)
cli # We should be safe anyway
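
LOAD_REALMODE_ESP above is a minimal bit spinlock: LOCK BTSL atomically sets bit 0 of tr_lock and leaves the previous bit value in CF, so JNC (bit was clear) means this CPU now owns the shared realmode stack, while a set bit sends it through a PAUSE/retry loop. A hedged C analogue of the acquire loop, assuming an x86 build of GCC or Clang and a hypothetical tr_lock variable rather than the real trampoline_header field:

	#include <stdint.h>

	static uint32_t tr_lock;	/* hypothetical stand-in for the tr_lock word */

	/* Sketch of the LOAD_REALMODE_ESP acquire loop: atomically test-and-set
	 * bit 0 (the C counterpart of "lock btsl $0, tr_lock"); if the bit was
	 * already set, another CPU owns the realmode stack, so spin and retry. */
	static void trampoline_lock_acquire(void)
	{
		while (__atomic_fetch_or(&tr_lock, 1, __ATOMIC_ACQUIRE) & 1)
			__builtin_ia32_pause();	/* same CPU hint as the PAUSE insn */
	}

Only after the bit is won does the macro load $rm_stack_end into %esp, so at most one CPU at a time ever runs on the shared realmode stack.
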
@@ -49,8 +63,7 @@ SYM_CODE_START(trampoline_start)
mov %ax, %es
mov %ax, %ss
- # Setup stack
- movl $rm_stack_end, %esp
+ LOAD_REALMODE_ESP
call verify_cpu # Verify the cpu supports long mode
testl %eax, %eax # Check for return code
@@ -93,8 +106,7 @@ SYM_CODE_START(sev_es_trampoline_start)
mov %ax, %es
mov %ax, %ss
- # Setup stack
- movl $rm_stack_end, %esp
+ LOAD_REALMODE_ESP
jmp .Lswitch_to_protected
SYM_CODE_END(sev_es_trampoline_start)
@@ -177,7 +189,7 @@ SYM_CODE_START(pa_trampoline_compat)
* In compatibility mode. Prep ESP and DX for startup_32, then disable
* paging and complete the switch to legacy 32-bit mode.
*/
- movl $rm_stack_end, %esp
+ LOAD_REALMODE_ESP
movw $__KERNEL_DS, %dx
movl $(CR0_STATE & ~X86_CR0_PG), %eax
@@ -241,6 +253,7 @@ SYM_DATA_START(trampoline_header)
SYM_DATA(tr_efer, .space 8)
SYM_DATA(tr_cr4, .space 4)
SYM_DATA(tr_flags, .space 4)
+ SYM_DATA(tr_lock, .space 4)
SYM_DATA_END(trampoline_header)
#include "trampoline_common.S"
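
Taken together, each AP's trip through the trampoline becomes a critical section over the single rm_stack_end stack: acquire in 16-bit code before loading %esp, release in secondary_startup_64 after the switch to the per-CPU task stack. The following self-contained user-space model (hypothetical names, pthreads standing in for real CPU bringup) only illustrates that the bit lock serializes the contenders; it is not kernel code:

	#include <pthread.h>
	#include <stdint.h>
	#include <stdio.h>

	/* One lock word and one shared "stack" that only the lock holder may
	 * use, mirroring tr_lock and rm_stack_end in the patch. */
	static uint32_t lock_word;
	static int shared_stack_users;	/* must never exceed 1 */

	static void acquire(void)
	{
		/* Atomic test-and-set of bit 0, like "lock btsl $0, tr_lock". */
		while (__atomic_fetch_or(&lock_word, 1, __ATOMIC_ACQUIRE) & 1)
			;	/* spin, as the trampoline does with PAUSE */
	}

	static void release(void)
	{
		/* Plain release store, like "movl $0, (%rax)" in head_64.S. */
		__atomic_store_n(&lock_word, 0, __ATOMIC_RELEASE);
	}

	static void *cpu_bringup(void *arg)
	{
		acquire();			/* LOAD_REALMODE_ESP */
		shared_stack_users++;		/* "run" on the shared stack */
		if (shared_stack_users > 1)
			fprintf(stderr, "lock failed to serialize!\n");
		shared_stack_users--;
		release();			/* secondary_startup_64 unlock */
		return NULL;
	}

	int main(void)
	{
		pthread_t t[8];

		for (int i = 0; i < 8; i++)
			pthread_create(&t[i], NULL, cpu_bringup, NULL);
		for (int i = 0; i < 8; i++)
			pthread_join(t[i], NULL);
		puts("all CPUs brought up");
		return 0;
	}
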