/* SPDX-License-Identifier: GPL-2.0-or-later */

/* For starting coreboot in protected mode */

/*
 * This is the modern bootblock. It prepares the system for C environment runtime
 * setup. The actual setup is done by hardware-specific code.
 *
 * It provides a bootflow similar to other architectures, and thus is considered
 * to be the modern approach.
 */

#include <arch/rom_segs.h>
#include <cpu/x86/cr.h>
#include <cpu/x86/post_code.h>

.section .init, "ax", @progbits

	.code32
/*
 * When we come here we are in protected mode.
 * NOTE: aligned to 4 so that we can be sure the prefetch
 * cache will be reloaded.
 */
	.align	4

.globl bootblock_protected_mode_entry
bootblock_protected_mode_entry:

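	/*
	 * We normally arrive here from the 16-bit reset-vector code after
	 * it has loaded the GDT and switched the CPU into protected mode;
	 * %eax still carries the CPU's BIST (Built-In Self Test) result.
	 */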
	/* Save the BIST value */
	movl	%eax, %ebp

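	/*
	 * post_code() reports boot progress; on most x86 boards this is an
	 * I/O write to port 0x80, so a hang here can be spotted with a POST
	 * card even though no console is available yet.
	 */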
	post_code(POSTCODE_ENTER_PROTECTED_MODE)

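	/*
	 * Load the flat data segment described by ROM_DATA_SEG (see
	 * <arch/rom_segs.h>) into the data segment registers. The GDT
	 * providing it was installed before jumping here.
	 */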
	movw	$ROM_DATA_SEG, %ax
	movw	%ax, %ds
	movw	%ax, %es
	movw	%ax, %ss
	xor	%ax, %ax /* zero out the fs and gs segment selectors */
	movw	%ax, %fs
	movw	%ax, %gs /* Will be used for cpu_info */

	/* Restore the BIST value to %eax */
	movl	%ebp, %eax

#if CONFIG(BOOTBLOCK_DEBUG_SPINLOOP)

	/* Wait for a JTAG debugger to break in and set EBX non-zero */
	xor	%ebx, %ebx

debug_spinloop:
	cmp	$0, %ebx
	jz	debug_spinloop
#endif

	/* MMX registers required here */

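	/*
	 * There is no RAM or stack at this point; cache-as-RAM (or SRAM) is
	 * only set up by the platform-specific code entered at the end of
	 * this file. The MMX registers therefore serve as scratch storage
	 * for the BIST result and the early timestamp.
	 */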
	/* BIST result in eax */
	movd	%eax, %mm0

__timestamp:

	/* Get an early timestamp */
	rdtsc
	movd	%eax, %mm1
	movd	%edx, %mm2

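	/*
	 * %mm1/%mm2 now hold the low/high halves of the TSC. The platform
	 * entry code is expected to recombine them and hand the value to
	 * the C bootblock as its base timestamp.
	 */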
#if CONFIG(SSE)
enable_sse:
	mov	%cr4, %eax
	/* Set CR4.OSFXSR so that SSE instructions and FXSAVE/FXRSTOR work */
	or	$CR4_OSFXSR, %ax
	mov	%eax, %cr4
#endif /* CONFIG(SSE) */

	/* We're done. Now it's up to platform-specific code */
	jmp	bootblock_pre_c_entry