/* SPDX-License-Identifier: GPL-2.0-or-later */

/* For starting coreboot in protected mode */

/*
 * This is the modern bootblock. It prepares the system for C environment runtime
 * setup. The actual setup is done by hardware-specific code.
 *
 * It provides a bootflow similar to other architectures, and thus is considered
 * to be the modern approach.
 */

#include <arch/rom_segs.h>
#include <cpu/x86/cr.h>
#include <cpu/x86/post_code.h>

.section .init, "ax", @progbits

	.code32
/*
 * When we get here, we are already in protected mode.
 * NOTE: aligned to 4 so that we are sure that the prefetch
 * cache will be reloaded.
 */
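/*
 * Precondition (typically established by the earlier reset-vector/real-mode
 * entry code): a GDT providing the flat ROM_CODE_SEG and ROM_DATA_SEG
 * selectors is already loaded, and %cs refers to ROM_CODE_SEG.
 */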
	.align 4

.globl bootblock_protected_mode_entry
bootblock_protected_mode_entry:

	/* Save the BIST value */
	movl	%eax, %ebp
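	/*
	 * The BIST (Built-In Self Test) result is deposited in %eax by the
	 * CPU at reset; 0 means the self test passed. It is parked in %ebp
	 * because no RAM or stack is available this early.
	 */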

#if !CONFIG(NO_EARLY_BOOTBLOCK_POSTCODES)
	post_code(POST_ENTER_PROTECTED_MODE)
#endif
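	/*
	 * post_code() emits an early progress value, typically to I/O port
	 * 0x80, so a POST card or other platform debug facility can show how
	 * far the bootblock got before any console exists.
	 */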

	movw	$ROM_DATA_SEG, %ax
	movw	%ax, %ds
	movw	%ax, %es
	movw	%ax, %ss
	movw	%ax, %fs
	movw	%ax, %gs
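	/*
	 * Load the flat ROM_DATA_SEG selector from the already-loaded GDT
	 * into all data segment registers, so every subsequent data access
	 * goes through a flat 4 GiB segment.
	 */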

	/* Restore the BIST value to %eax */
	movl	%ebp, %eax

#if CONFIG(BOOTBLOCK_DEBUG_SPINLOOP)

	/* Wait for a JTAG debugger to break in and set EBX non-zero */
	xor	%ebx, %ebx

debug_spinloop:
	cmp	$0, %ebx
	jz	debug_spinloop
#endif
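	/*
	 * With BOOTBLOCK_DEBUG_SPINLOOP enabled, execution parks in the loop
	 * above until a hardware debugger writes a non-zero value to %ebx.
	 * For example, from a JTAG-attached GDB session one might issue
	 * (exact commands depend on the probe; shown only as an illustration):
	 *	set $ebx = 1
	 *	continue
	 */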

#if !CONFIG(USE_MARCH_586)
	/* MMX registers required here */

	/* BIST result in eax */
	movd	%eax, %mm0

__timestamp:

	/* Get an early timestamp */
	rdtsc
	movd	%eax, %mm1
	movd	%edx, %mm2
#endif
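	/*
	 * With no writable RAM yet, the BIST result and the 64-bit TSC value
	 * read above are stashed in MMX registers (%mm0-%mm2). Platform code
	 * is expected to recover them later and pass them on to the C part
	 * of the bootblock (e.g. as the initial timestamp).
	 */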

#if CONFIG(SSE)
enable_sse:
	mov	%cr4, %eax
	or	$CR4_OSFXSR, %ax
	mov	%eax, %cr4
#endif /* CONFIG(SSE) */
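	/*
	 * CR4.OSFXSR (bit 9) signals OS support for FXSAVE/FXRSTOR and must
	 * be set before SSE instructions may be executed. Only %ax is needed
	 * for the OR because the flag sits in the low 16 bits of CR4.
	 */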

	/* We're done. Now it's up to platform-specific code */
	jmp	bootblock_pre_c_entry
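/*
 * bootblock_pre_c_entry is provided by platform- or CPU-specific code. As a
 * rough sketch of the usual flow (an assumption, not a definitive
 * implementation): it enables cache-as-RAM so a stack exists, recovers the
 * values stashed in %mm0-%mm2 above and hands them to the C bootblock,
 * conceptually along the lines of:
 *
 *	movd	%mm1, %eax		# low 32 bits of the early TSC
 *	movd	%mm2, %edx		# high 32 bits of the early TSC
 *	pushl	%edx
 *	pushl	%eax
 *	call	bootblock_c_entry	# 64-bit base_timestamp argument
 */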