From 45a9f9fedf033e90ddb7eb3130cfe635b55c2d69 Mon Sep 17 00:00:00 2001
From: Pawel Wieczorkiewicz
Date: Tue, 27 Oct 2020 10:38:51 +0100
Subject: [PATCH] rmode: add prot_to_real() functionality

Add protected mode function prot_to_real() calling real mode entering
function _prot_to_real().
In the real mode supporting code handle two cases:
* entering and returning for long mode
* entering and returning for protected mode

Certain code paths can be reused, but a flag (return_mode) is needed to
know which path to return to.

Signed-off-by: Pawel Wieczorkiewicz
---
 arch/x86/boot/head.S       |  20 +++++-
 arch/x86/real_prot_dance.S | 124 ++++++++++++++++++++++++++++++++++---
 2 files changed, 134 insertions(+), 10 deletions(-)

diff --git a/arch/x86/boot/head.S b/arch/x86/boot/head.S
index f3d180b4..d4b42d83 100644
--- a/arch/x86/boot/head.S
+++ b/arch/x86/boot/head.S
@@ -64,6 +64,12 @@ GLOBAL(_start)
     mov $l4_pt_entries, %eax
     mov %eax, %cr3
 
+#if defined (KTF_UNIT_TEST)
+    SAVE_REGS_32
+    call prot_to_real
+    RESTORE_REGS_32
+#endif
+
     /* Enable long mode */
     movl $MSR_EFER, %ecx
     rdmsr
@@ -76,7 +82,19 @@
     mov %eax, %cr0
 
     /* clear prefetch and jump to 64bit code */
-    ljmp $__KERN_CS64, $.Llong_mode
+    ljmp $__KERN_CS64, $.Llong_mode
+
+ENTRY(prot_to_real)
+    lea (boot_gdt_ptr), %eax
+    push %eax
+    lea (boot_idt_ptr), %eax
+    push %eax
+
+    call _prot_to_real
+
+    add $8, %esp
+    ret
+END_FUNC(prot_to_real)
 
 .code64
 .Llong_mode:
diff --git a/arch/x86/real_prot_dance.S b/arch/x86/real_prot_dance.S
index 6702fd16..c42c11de 100644
--- a/arch/x86/real_prot_dance.S
+++ b/arch/x86/real_prot_dance.S
@@ -27,6 +27,9 @@
 #include 
 #include 
 
+#define RETURN_TO_PROT_MODE 0x01
+#define RETURN_TO_LONG_MODE 0x02
+
 .code64
 SECTION(.text.rmode, "ax", 16)
 ENTRY(_long_to_real)
@@ -79,24 +82,75 @@ ENTRY(_long_to_real)
     /* Save stack pointer */
     mov %rsp, _saved_sp
 
+    movb $RETURN_TO_LONG_MODE, return_mode
+
     push $__KERN_CS32
-    push $.Lprot_mode
+    push $.Lfrom_long_mode
     lretq
 END_FUNC(_long_to_real)
 
 .code32
-.align 16
-.Lprot_mode:
-    /* Load real mode accessible GDT and IDT */
-    lgdt rmode_gdt_ptr
-    lidt rmode_idt_ptr
+ENTRY(_prot_to_real)
+    /* Save current GDT and IDT in low memory area */
+    mov 4(%esp), %eax
+    mov %eax, _saved_idt_ptr
+    mov 8(%esp), %eax
+    mov %eax, _saved_gdt_ptr
+
+    /* Save current flags register */
+    pushfl
+
+    /* Disable interrupts. If we get NMI or MC, hope we get back here */
+    cli
+
+    /* Save control registers context */
+    mov %cr0, %eax
+    mov %eax, _saved_cr0
+
+    mov %cr3, %eax
+    mov %eax, _saved_cr3
+
+    mov %cr4, %eax
+    mov %eax, _saved_cr4
+
+    /* Save segment selector registers */
+    movw %ds, %ax
+    movw %ax, _saved_ds
+
+    movw %es, %ax
+    movw %ax, _saved_es
+
+    movw %gs, %ax
+    movw %ax, _saved_gs
+
+    movw %fs, %ax
+    movw %ax, _saved_fs
+
+    movw %ss, %ax
+    movw %ax, _saved_ss
+
+    /* Save stack pointer */
+    mov %esp, _saved_sp
+
+    movb $RETURN_TO_PROT_MODE, return_mode
+
+    jmp .Lfrom_prot_mode
+END_FUNC(_prot_to_real)
 
+.align 16
+.Lfrom_long_mode:
     /* Disable LME in EFER */
     movl $MSR_EFER, %ecx
     rdmsr
     and $~EFER_LME, %eax
     wrmsr
 
+.align 16
+.Lfrom_prot_mode:
+    /* Load real mode accessible GDT and IDT */
+    lgdt rmode_gdt_ptr
+    lidt rmode_idt_ptr
+
     /* Disable paging to enter protected mode */
     mov %cr0, %eax
     and $~(X86_CR0_PG | X86_CR0_WP), %eax
@@ -168,11 +222,17 @@ END_FUNC(_long_to_real)
     mov %eax, %fs
     mov %eax, %ss
 
+    /* Find correct mode to return to */
+    cmpb $RETURN_TO_LONG_MODE, return_mode
+    movb $0x00, return_mode
+    jne .Lprot_to_prot
+
+    ljmp $__KERN_CS32, $.Lret_to_long_mode_prot
+.Lprot_to_prot:
     ljmp $__KERN_CS32, $.Lret_to_prot_mode
 
 .code32
 .align 16
-.Lret_to_prot_mode:
+.Lret_to_long_mode_prot:
     mov %cr4, %eax
     or $(X86_CR4_PAE | X86_CR4_PSE), %eax
     mov %eax, %cr4
@@ -194,11 +254,54 @@ END_FUNC(_long_to_real)
     or $(X86_CR0_PG | X86_CR0_WP), %eax
     mov %eax, %cr0
 
-    ljmp $__KERN_CS64, $.Lret_to_long_mode
+    ljmp $__KERN_CS64, $.Lret_to_long_mode_long
+
+.align 16
+.Lret_to_prot_mode:
+    /* Restore GDT and IDT descriptors */
+    mov _saved_gdt_ptr, %eax
+    lgdt (%eax)
+    mov _saved_idt_ptr, %eax
+    lidt (%eax)
+
+    /* Restore control registers */
+    mov _saved_cr0, %eax
+    mov %eax, %cr0
+
+    /* Restore original page tables */
+    mov _saved_cr3, %eax
+    mov %eax, %cr3
+
+    mov _saved_cr4, %eax
+    mov %eax, %cr4
+
+    /* Restore segment selector registers */
+    mov _saved_ds, %eax
+    mov %eax, %ds
+
+    mov _saved_es, %eax
+    mov %eax, %es
+
+    mov _saved_fs, %eax
+    mov %eax, %fs
+
+    mov _saved_gs, %eax
+    mov %eax, %gs
+
+    mov _saved_ss, %eax
+    mov %eax, %ss
+
+    /* Restore stack pointer */
+    mov _saved_sp, %esp
+
+    /* Restore flags register */
+    popfl
+
+    ret
 
 .code64
 .align 16
-.Lret_to_long_mode:
+.Lret_to_long_mode_long:
     /* Restore control registers */
     mov _saved_cr0, %rax
     mov %rax, %cr0
@@ -243,6 +346,9 @@ END_FUNC(_long_to_real)
 
 /* Data section allocations */
 SECTION(.bss.rmode, "aw", 16)
+GLOBAL(return_mode)
+    .byte 0x0
+
 GLOBAL(_saved_cr0)
     .quad 0x0