/* SPDX-License-Identifier: GPL-2.0-only */

/*
 * For dropping from long mode to protected mode.
 *
 * For reference see "AMD64 Architecture Programmer's Manual Volume 2",
 * Document 24593-Rev. 3.31-July 2019 Chapter 5.3
 *
 * Clobbers: rax, rbx, rcx, rdx
 */
.code64

#include <cpu/x86/msr.h>
#include <cpu/x86/cr.h>
#if defined(__RAMSTAGE__)
#include <arch/ram_segs.h>
#define CODE_SEG RAM_CODE_SEG
#define DATA_SEG RAM_DATA_SEG
#else
#include <arch/rom_segs.h>
#define CODE_SEG ROM_CODE_SEG
#define DATA_SEG ROM_DATA_SEG
#endif

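/*
 * Note: this assumes the active GDT already provides the 32-bit CODE_SEG
 * and DATA_SEG descriptors selected above, and that this code runs
 * identity-mapped, so execution can continue at the same address once
 * paging is switched off.
 */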
drop_longmode:
#if !ENV_CACHE_AS_RAM
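	/*
	 * WBINVD writes back and invalidates the caches; with cache-as-RAM
	 * still active that would destroy the CAR contents, so the flush is
	 * only performed when CAR is not in use.
	 */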
	/* Ensure cache is clean. */
	wbinvd
#endif
	/* Set 32-bit code segment and ss */
	mov $CODE_SEG, %rcx
	/* SetCodeSelector32 will drop us to protected mode on return */
	call SetCodeSelector32

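	/*
	 * SetCodeSelector32 "returns" here via iretq with the 32-bit code
	 * selector already loaded in %cs, so from this point on the
	 * instructions are assembled as 32-bit code.
	 */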
	/* Skip SetCodeSelector32 */
.code32
	jmp __longmode_compatibility

.align 8
.code64
SetCodeSelector32:
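	# In 64-bit mode iretq unconditionally pops RIP, CS, RFLAGS, RSP and
	# SS, so a full five-slot frame is built below and the far "return"
	# reloads %cs with the 32-bit selector passed in %rcx.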
	# pop the return address from stack
	pop %rbx

	# save rsp because we need to push it after ss
	mov %rsp, %rdx

	# use iret to jump to a 32-bit offset in a new code segment
	# iret will pop cs:rip, flags, then ss:rsp
	mov %ss, %ax	# need to push ss, but push ss instruction
	push %rax	# not valid in x64 mode, so use ax
	push %rdx	# the rsp to load
	pushfq		# push rflags
	push %rcx	# cx is code segment selector from caller
	push %rbx	# push the IP for the next instruction

	# the iretq will behave like ret, with the new cs/ss value loaded
	iretq

.align 4
.code32
__longmode_compatibility:
	/* Running in 32-bit compatibility mode */

	/* Use flat data segment */
	movl $DATA_SEG, %eax
	movl %eax, %ds
	movl %eax, %es
	movl %eax, %ss
	movl %eax, %fs
	movl %eax, %gs

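	/*
	 * Order matters when leaving long mode: paging has to be switched
	 * off (CR0.PG cleared) while still in compatibility mode before
	 * EFER.LME may be cleared; PAE and CR3 are only cleaned up after
	 * long mode is inactive.
	 */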
	/* Disable paging. */
	movl %cr0, %eax
	andl $(~CR0_PG), %eax
	movl %eax, %cr0

	/* Disable long mode. */
	movl $(IA32_EFER), %ecx
	rdmsr
	andl $(~EFER_LME), %eax
	wrmsr

	/* Disable PAE. */
	movl %cr4, %eax
	andl $(~CR4_PAE), %eax
	movl %eax, %cr4

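	/*
	 * Paging is already off here, so zeroing %cr3 below simply drops the
	 * reference to the long-mode page tables rather than switching to a
	 * new set.
	 */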
	/* Clear page table register */
	xor %eax, %eax
	movl %eax, %cr3

__longmode_exit: