blob: fffe3f80b4b1f1275220697130609e07f74f46f5 [file] [log] [blame]
Angel Ponsf23ae0b2020-04-02 23:48:12 +02001/* SPDX-License-Identifier: GPL-2.0-only */
Arthur Heymans7a8205b2018-06-03 10:29:07 +02002
Martin Rothc87ab012022-11-20 19:32:51 -07003#include <cpu/intel/post_codes.h>
Arthur Heymans7a8205b2018-06-03 10:29:07 +02004#include <cpu/x86/mtrr.h>
5#include <cpu/x86/cache.h>
6#include <cpu/x86/post_code.h>
7
Kyösti Mälkki7522a8f2020-11-20 16:47:38 +02008.section .init
Kyösti Mälkkic641f7e2018-12-28 16:54:54 +02009.global bootblock_pre_c_entry
10
Patrick Rudolphc439e072020-09-28 22:31:06 +020011#include <cpu/intel/car/cache_as_ram_symbols.inc>
12
/*
 * Cache-as-RAM (CAR) setup for the bootblock.
 *
 * Entered in 32-bit protected mode with no usable RAM.  Sets up MTRRs so a
 * region of the CPU cache can be used as temporary RAM, zero-fills that
 * region to pull it into the cache, points an MTRR at the flash for
 * execute-in-place (XIP), builds a stack, and calls the C entry point with
 * the early TSC and BIST values that were stashed in %mm0-%mm2 by earlier
 * reset-vector code (NOTE(review): the mm-register convention is established
 * outside this file — confirm against the reset vector stub).
 * This code never returns; falling past the C call halts forever.
 */
Arthur Heymans7a8205b2018-06-03 10:29:07 +020013.code32
14_cache_as_ram_setup:
15
Kyösti Mälkkic641f7e2018-12-28 16:54:54 +020016bootblock_pre_c_entry:
Arthur Heymans7a8205b2018-06-03 10:29:07 +020017
18cache_as_ram:
Martin Rothc87ab012022-11-20 19:32:51 -070019	post_code(POST_BOOTBLOCK_CAR)
Arthur Heymans7a8205b2018-06-03 10:29:07 +020020
21	/* Send INIT IPI to all excluding ourself. */
	/* 0xFEE00300 is the local APIC ICR (low dword); 0x000C4500 =
	   INIT delivery mode, level assert, all-excluding-self shorthand. */
22	movl $0x000C4500, %eax
23	movl $0xFEE00300, %esi
24	movl %eax, (%esi)
25
Arthur Heymans3aa9adb2018-06-03 11:02:54 +020026	/* All CPUs need to be in Wait for SIPI state */
	/* Poll ICR bit 12 (delivery status) until the INIT IPI has been
	   accepted by all target CPUs. */
27wait_for_sipi:
28	movl (%esi), %eax
29	bt $12, %eax
30	jc wait_for_sipi
31
Martin Rothc87ab012022-11-20 19:32:51 -070032	post_code(POST_SOC_CLEAR_FIXED_MTRRS)
Arthur Heymans3aa9adb2018-06-03 11:02:54 +020033
34	/* Clear/disable fixed MTRRs */
	/* %ebx walks the fixed_mtrr_list table backwards, 2 bytes (one
	   .word MSR number) per iteration; %edx:%eax stay zero for wrmsr. */
35	mov $fixed_mtrr_list_size, %ebx
36	xor %eax, %eax
37	xor %edx, %edx
38
39clear_fixed_mtrr:
40	add $-2, %ebx
41	movzwl fixed_mtrr_list(%ebx), %ecx
Arthur Heymans7a8205b2018-06-03 10:29:07 +020042	wrmsr
	/* ZF here is still from the "add" above: neither movzx nor wrmsr
	   modifies flags, so the loop ends when %ebx reaches 0. */
Arthur Heymans3aa9adb2018-06-03 11:02:54 +020043	jnz clear_fixed_mtrr
44
Elyes HAOUAS02820ca2018-09-30 07:44:39 +020045	/* Figure out how many MTRRs we have, and clear them out */
Arthur Heymans3aa9adb2018-06-03 11:02:54 +020046	mov $MTRR_CAP_MSR, %ecx
47	rdmsr
48	movzb %al, %ebx /* Number of variable MTRRs */
49	mov $MTRR_PHYS_BASE(0), %ecx
50	xor %eax, %eax
51	xor %edx, %edx
52
	/* Each variable MTRR is a consecutive PHYS_BASE/PHYS_MASK MSR pair,
	   so two wrmsr + two inc per MTRR. */
53clear_var_mtrr:
54	wrmsr
55	inc %ecx
56	wrmsr
57	inc %ecx
58	dec %ebx
59	jnz clear_var_mtrr
Arthur Heymans7a8205b2018-06-03 10:29:07 +020060
Martin Rothc87ab012022-11-20 19:32:51 -070061	post_code(POST_SOC_SET_DEF_MTRR_TYPE)
Arthur Heymans7a8205b2018-06-03 10:29:07 +020062	/* Configure the default memory type to uncacheable. */
	/* Clears the default-type field (bits 7:0) and the E/FE enable bits
	   (bits 10,11) of MTRR_DEF_TYPE; type 0 = UC. */
63	movl $MTRR_DEF_TYPE_MSR, %ecx
64	rdmsr
65	andl $(~0x00000cff), %eax
66	wrmsr
67
Arthur Heymans3aa9adb2018-06-03 11:02:54 +020068	/* Determine CPU_ADDR_BITS and load PHYSMASK high word to %edx. */
	/* CPUID 0x80000008: AL = physical address width in bits.
	   %edx = (1 << (physbits - 32)) - 1, i.e. the valid high dword of a
	   PHYS_MASK MSR covering the full physical address space. */
69	movl $0x80000008, %eax
70	cpuid
71	movb %al, %cl
72	sub $32, %cl
73	movl $1, %edx
74	shl %cl, %edx
75	subl $1, %edx
76
77	/* Preload high word of address mask (in %edx) for Variable
78	   MTRRs 0 and 1. */
79addrsize_set_high:
80	xorl %eax, %eax
81	movl $MTRR_PHYS_MASK(0), %ecx
82	wrmsr
83	movl $MTRR_PHYS_MASK(1), %ecx
84	wrmsr
85
Martin Rothc87ab012022-11-20 19:32:51 -070086	post_code(POST_SOC_SET_MTRR_BASE)
Arthur Heymans7a8205b2018-06-03 10:29:07 +020087	/* Set Cache-as-RAM base address. */
	/* MTRR 0: CAR region, write-back, base/mask symbols come from
	   cache_as_ram_symbols.inc (linker-provided). */
88	movl $(MTRR_PHYS_BASE(0)), %ecx
Kyösti Mälkkidc6bb6c2019-11-08 00:08:55 +020089	movl $_car_mtrr_start, %eax
90	orl $MTRR_TYPE_WRBACK, %eax
Arthur Heymans7a8205b2018-06-03 10:29:07 +020091	xorl %edx, %edx
92	wrmsr
93
Martin Rothc87ab012022-11-20 19:32:51 -070094	post_code(POST_SOC_SET_MTRR_MASK)
Arthur Heymans7a8205b2018-06-03 10:29:07 +020095	/* Set Cache-as-RAM mask. */
	/* rdmsr keeps the high mask word written at addrsize_set_high;
	   only the low dword is replaced. */
96	movl $(MTRR_PHYS_MASK(0)), %ecx
Arthur Heymans3aa9adb2018-06-03 11:02:54 +020097	rdmsr
Patrick Rudolphc439e072020-09-28 22:31:06 +020098	movl car_mtrr_mask, %eax
Kyösti Mälkkidc6bb6c2019-11-08 00:08:55 +020099	orl $MTRR_PHYS_MASK_VALID, %eax
Arthur Heymans7a8205b2018-06-03 10:29:07 +0200100	wrmsr
101
Martin Rothc87ab012022-11-20 19:32:51 -0700102	post_code(POST_SOC_ENABLE_MTRRS)
Arthur Heymans7a8205b2018-06-03 10:29:07 +0200103
104	/* Enable MTRR. */
105	movl $MTRR_DEF_TYPE_MSR, %ecx
106	rdmsr
107	orl $MTRR_DEF_TYPE_EN, %eax
108	wrmsr
109
110	/* Enable L2 cache. */
	/* NOTE(review): MSR 0x11e bit 8 — presumably BBL_CR_CTL3 L2 enable
	   on this CPU family; confirm against the target CPU's datasheet. */
111	movl $0x11e, %ecx
112	rdmsr
113	orl $(1 << 8), %eax
114	wrmsr
115
116	/* Enable cache (CR0.CD = 0, CR0.NW = 0). */
	/* invd discards any stale cache contents before caching is turned
	   on; nothing in the cache is valid data yet. */
117	movl %cr0, %eax
118	andl $(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
119	invd
120	movl %eax, %cr0
121
122	/* Clear the cache memory region. This will also fill up the cache. */
	/* stosl writes dwords, so the byte count is divided by 4. */
Kyösti Mälkkidc6bb6c2019-11-08 00:08:55 +0200123	cld
Arthur Heymans7a8205b2018-06-03 10:29:07 +0200124	xorl %eax, %eax
Patrick Rudolphc439e072020-09-28 22:31:06 +0200125	movl car_mtrr_start, %edi
126	movl car_mtrr_size, %ecx
Kyösti Mälkkidc6bb6c2019-11-08 00:08:55 +0200127	shr $2, %ecx
Arthur Heymans7a8205b2018-06-03 10:29:07 +0200128	rep stosl
129
Martin Rothc87ab012022-11-20 19:32:51 -0700130	post_code(POST_SOC_DISABLE_CACHE)
Arthur Heymans7a8205b2018-06-03 10:29:07 +0200131	/* Enable Cache-as-RAM mode by disabling cache. */
	/* With CR0.CD set the filled lines stay resident, turning the cache
	   into scratch RAM backed by the CAR region. */
132	movl %cr0, %eax
133	orl $CR0_CacheDisable, %eax
134	movl %eax, %cr0
135
136	/* Enable cache for our code in Flash because we do XIP here */
	/* MTRR 1: write-protect over the flash mapping of this program so
	   instruction fetches are cacheable while CR0.CD is set for data. */
137	movl $MTRR_PHYS_BASE(1), %ecx
138	xorl %edx, %edx
Kyösti Mälkkice9f4222018-06-25 18:53:36 +0300139	movl $_program, %eax
Patrick Rudolphc439e072020-09-28 22:31:06 +0200140	andl xip_mtrr_mask, %eax
Arthur Heymans7a8205b2018-06-03 10:29:07 +0200141	orl $MTRR_TYPE_WRPROT, %eax
142	wrmsr
Arthur Heymans7a8205b2018-06-03 10:29:07 +0200143	movl $MTRR_PHYS_MASK(1), %ecx
Arthur Heymans3aa9adb2018-06-03 11:02:54 +0200144	rdmsr
Patrick Rudolphc439e072020-09-28 22:31:06 +0200145	movl xip_mtrr_mask, %eax
Kyösti Mälkkidc6bb6c2019-11-08 00:08:55 +0200146	orl $MTRR_PHYS_MASK_VALID, %eax
Arthur Heymans7a8205b2018-06-03 10:29:07 +0200147	wrmsr
148
Martin Rothc87ab012022-11-20 19:32:51 -0700149	post_code(POST_SOC_ENABLE_CACHE)
Arthur Heymans7a8205b2018-06-03 10:29:07 +0200150	/* Enable cache. */
151	movl %cr0, %eax
152	andl $(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
153	movl %eax, %cr0
154
155	/* Setup the stack. */
	/* _ecar_stack is the (top) end of the CAR stack from the linker. */
Arthur Heymansdf9cdcf2019-11-09 06:50:20 +0100156	mov $_ecar_stack, %esp
Kyösti Mälkkic641f7e2018-12-28 16:54:54 +0200157
158	/* Need to align stack to 16 bytes at call instruction. Account for
159	   the pushes below. */
	/* 4-byte pad + three 4-byte pushes = 16, so %esp is 16-aligned at
	   the call in the 32-bit path. */
Arthur Heymans348b79f2018-06-03 17:14:19 +0200160	andl $0xfffffff0, %esp
Kyösti Mälkkic641f7e2018-12-28 16:54:54 +0200161	subl $4, %esp
Arthur Heymans7a8205b2018-06-03 10:29:07 +0200162
Patrick Rudolphc439e072020-09-28 22:31:06 +0200163#if ENV_X86_64
164
	/* Switch to long mode, then pass TSC (%rdi = mm2:mm1) and
	   BIST (%rsi = mm0) in registers per the 64-bit calling convention. */
165	#include <cpu/x86/64bit/entry64.inc>
166
167	movd %mm2, %rdi
168	shlq $32, %rdi
169	movd %mm1, %rsi
170	or %rsi, %rdi
171	movd %mm0, %rsi
172#else
Kyösti Mälkkic641f7e2018-12-28 16:54:54 +0200173	/* push TSC and BIST to stack */
174	movd %mm0, %eax
Elyes HAOUAS87930b32019-01-16 12:41:57 +0100175	pushl %eax /* BIST */
Kyösti Mälkkic641f7e2018-12-28 16:54:54 +0200176	movd %mm2, %eax
177	pushl %eax /* tsc[63:32] */
178	movd %mm1, %eax
Elyes HAOUAS87930b32019-01-16 12:41:57 +0100179	pushl %eax /* tsc[31:0] */
Patrick Rudolphc439e072020-09-28 22:31:06 +0200180#endif
Arthur Heymans7a8205b2018-06-03 10:29:07 +0200181
Kyösti Mälkkic641f7e2018-12-28 16:54:54 +0200182before_c_entry:
Martin Rothc87ab012022-11-20 19:32:51 -0700183	post_code(POST_BOOTBLOCK_BEFORE_C_ENTRY)
Kyösti Mälkkic641f7e2018-12-28 16:54:54 +0200184	call bootblock_c_entry_bist
Arthur Heymans7a8205b2018-06-03 10:29:07 +0200185
186	/* Should never see this postcode */
187	post_code(POST_DEAD_CODE)
188
189.Lhlt:
190	hlt
191	jmp .Lhlt
192
/*
 * Table of fixed-range MTRR MSR numbers, one 16-bit .word per entry.
 * Consumed backwards by the clear_fixed_mtrr loop above, which loads each
 * entry with movzwl into %ecx and zeroes that MSR via wrmsr.
 * fixed_mtrr_list_size is the table size in bytes (2 per entry).
 */
Arthur Heymans3aa9adb2018-06-03 11:02:54 +0200193fixed_mtrr_list:
194	.word MTRR_FIX_64K_00000
195	.word MTRR_FIX_16K_80000
196	.word MTRR_FIX_16K_A0000
197	.word MTRR_FIX_4K_C0000
198	.word MTRR_FIX_4K_C8000
199	.word MTRR_FIX_4K_D0000
200	.word MTRR_FIX_4K_D8000
201	.word MTRR_FIX_4K_E0000
202	.word MTRR_FIX_4K_E8000
203	.word MTRR_FIX_4K_F0000
204	.word MTRR_FIX_4K_F8000
205fixed_mtrr_list_size = . - fixed_mtrr_list
Arthur Heymans7a8205b2018-06-03 10:29:07 +0200206
207_cache_as_ram_setup_end: