/* SPDX-License-Identifier: GPL-2.0-only */

#include <cpu/intel/post_codes.h>
#include <cpu/x86/mtrr.h>
#include <cpu/x86/cache.h>
#include <cpu/x86/post_code.h>
#include <cpu/x86/lapic_def.h>

/* Macro to access Local APIC registers at default base. */
#define LAPIC(x) $(LAPIC_DEFAULT_BASE | LAPIC_ ## x)

.section .init
.global bootblock_pre_c_entry

#include <cpu/intel/car/cache_as_ram_symbols.inc>
.code32
_cache_as_ram_setup:

bootblock_pre_c_entry:

cache_as_ram:
	post_code(POSTCODE_BOOTBLOCK_CAR)

	/* Only the BSP performs CAR setup; APs branch to ap_init below.
	 * BSP flag is bit 8 of IA32_APIC_BASE (LAPIC_BASE_MSR).
	 */
	movl	$LAPIC_BASE_MSR, %ecx
	rdmsr
	andl	$LAPIC_BASE_MSR_BOOTSTRAP_PROCESSOR, %eax
	jz	ap_init

	/* Clear/disable fixed MTRRs. %eax:%edx stay zero for wrmsr. */
	mov	$fixed_mtrr_list, %ebx
	xor	%eax, %eax
	xor	%edx, %edx

clear_fixed_mtrr:
	movzwl	(%ebx), %ecx		/* next MSR number from the .word list */
	wrmsr
	add	$2, %ebx
	cmp	$fixed_mtrr_list_end, %ebx
	jl	clear_fixed_mtrr

	/* Figure out how many MTRRs we have, and clear them out */
	mov	$MTRR_CAP_MSR, %ecx
	rdmsr
	movzb	%al, %ebx		/* Number of variable MTRRs */
	mov	$MTRR_PHYS_BASE(0), %ecx
	xor	%eax, %eax
	xor	%edx, %edx

clear_var_mtrr:
	wrmsr				/* clear PHYS_BASE(n) */
	inc	%ecx
	wrmsr				/* clear PHYS_MASK(n) */
	inc	%ecx
	dec	%ebx
	jnz	clear_var_mtrr
Martin Rothc87ab012022-11-20 19:32:51 -070057 post_code(POST_SOC_SET_DEF_MTRR_TYPE)
Kyösti Mälkki6a8ce0d2018-05-17 17:22:51 +030058
59 /* Configure the default memory type to uncacheable. */
60 movl $MTRR_DEF_TYPE_MSR, %ecx
61 rdmsr
62 andl $(~0x00000cff), %eax
63 wrmsr
64
Martin Rothc87ab012022-11-20 19:32:51 -070065 post_code(POST_SOC_DETERMINE_CPU_ADDR_BITS)
Kyösti Mälkki6a8ce0d2018-05-17 17:22:51 +030066
67 /* Determine CPU_ADDR_BITS and load PHYSMASK high
68 * word to %edx.
69 */
70 movl $0x80000000, %eax
71 cpuid
72 cmpl $0x80000008, %eax
73 jc addrsize_no_MSR
74 movl $0x80000008, %eax
75 cpuid
76 movb %al, %cl
77 sub $32, %cl
78 movl $1, %edx
79 shl %cl, %edx
80 subl $1, %edx
81 jmp addrsize_set_high
82addrsize_no_MSR:
83 movl $1, %eax
84 cpuid
85 andl $(1 << 6 | 1 << 17), %edx /* PAE or PSE36 */
86 jz addrsize_set_high
87 movl $0x0f, %edx
88
89 /* Preload high word of address mask (in %edx) for Variable
90 * MTRRs 0 and 1 and enable local APIC at default base.
91 */
92addrsize_set_high:
93 xorl %eax, %eax
94 movl $MTRR_PHYS_MASK(0), %ecx
95 wrmsr
96 movl $MTRR_PHYS_MASK(1), %ecx
97 wrmsr
98 movl $LAPIC_BASE_MSR, %ecx
99 not %edx
100 movl %edx, %ebx
101 rdmsr
102 andl %ebx, %edx
103 andl $(~LAPIC_BASE_MSR_ADDR_MASK), %eax
104 orl $(LAPIC_DEFAULT_BASE | LAPIC_BASE_MSR_ENABLE), %eax
105 wrmsr
106
bsp_init:

	post_code(POST_SOC_BSP_INIT)

	/* Send INIT IPI to all excluding ourself. */
	movl	LAPIC(ICR), %edi
	movl	$(LAPIC_DEST_ALLBUT | LAPIC_INT_ASSERT | LAPIC_DM_INIT), %eax
1:	movl	%eax, (%edi)
	movl	$0x30, %ecx		/* short fixed delay between polls */
2:	pause
	dec	%ecx
	jnz	2b
	movl	(%edi), %ecx
	andl	$LAPIC_ICR_BUSY, %ecx	/* resend while delivery still pending */
	jnz	1b

	post_code(POST_SOC_COUNT_CORES)

	movl	$1, %eax
	cpuid
	btl	$28, %edx		/* CPUID.1:EDX[28] = HTT flag */
	jnc	sipi_complete
	bswapl	%ebx
	movzx	%bh, %edi		/* %edi = logical processor count */
	cmpb	$1, %bh
	jbe	sipi_complete	/* only one LAPIC ID in package */

	/* Determine cores per package: CPUID leaf 4 if available, else 1. */
	movl	$0, %eax
	cpuid
	movb	$1, %bl
	cmpl	$4, %eax
	jb	cores_counted
	movl	$4, %eax
	movl	$0, %ecx
	cpuid
	shr	$26, %eax		/* EAX[31:26] = max core IDs - 1 */
	movb	%al, %bl
	inc	%bl

cores_counted:
	movl	%edi, %eax
	divb	%bl			/* logical CPUs per core */
	cmpb	$1, %al
	jbe	sipi_complete	/* only LAPIC ID of a core */

	/* For a hyper-threading processor, cache must not be disabled
	 * on an AP on the same physical package with the BSP.
	 */

hyper_threading_cpu:

	post_code(POST_SOC_CPU_HYPER_THREADING)

	/* Send Start IPI to all excluding ourself. */
	movl	LAPIC(ICR), %edi
	movl	$(LAPIC_DEST_ALLBUT | LAPIC_DM_STARTUP), %eax
	orl	$ap_sipi_vector_in_rom, %eax
1:	movl	%eax, (%edi)
	movl	$0x30, %ecx
2:	pause
	dec	%ecx
	jnz	2b
	movl	(%edi), %ecx
	andl	$LAPIC_ICR_BUSY, %ecx
	jnz	1b

	post_code(POST_SOC_CPU_SIBLING_DELAY)

	/* Wait for sibling CPU to start: the AP signals by writing a
	 * non-zero value into MTRR_PHYS_BASE(0) (see ap_init).
	 */
1:	movl	$(MTRR_PHYS_BASE(0)), %ecx
	rdmsr
	andl	%eax, %eax
	jnz	sipi_complete

	movl	$0x30, %ecx
2:	pause
	dec	%ecx
	jnz	2b
	jmp	1b
ap_init:
	post_code(POST_SOC_CPU_AP_INIT)

	/* Do not disable cache (so BSP can enable it). */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	movl	%eax, %cr0

	post_code(POST_SOC_SET_MTRR_BASE)

	/* MTRR registers are shared between HT siblings.
	 * Writing a non-zero PHYS_BASE(0) tells the spinning BSP
	 * that this sibling has started.
	 */
	movl	$(MTRR_PHYS_BASE(0)), %ecx
	movl	$(1 << 12), %eax
	xorl	%edx, %edx
	wrmsr

	post_code(POST_SOC_AP_HALT)

ap_halt:
	cli
1:	hlt
	jmp	1b
sipi_complete:

	post_code(POST_SOC_SET_CAR_BASE)

	/* Set Cache-as-RAM base address. */
	movl	$(MTRR_PHYS_BASE(0)), %ecx
	movl	car_mtrr_start, %eax
	orl	$MTRR_TYPE_WRBACK, %eax
	xorl	%edx, %edx
	wrmsr

	/* Set Cache-as-RAM mask. */
	movl	$(MTRR_PHYS_MASK(0)), %ecx
	rdmsr				/* keep the preloaded high word in %edx */
	movl	car_mtrr_mask, %eax
	orl	$MTRR_PHYS_MASK_VALID, %eax
	wrmsr

	post_code(POST_SOC_ENABLE_MTRRS)

	/* Enable MTRR. */
	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
	orl	$MTRR_DEF_TYPE_EN, %eax
	wrmsr

	/* Enable L2 cache Write-Back (WBINVD and FLUSH#).
	 *
	 * MSR is set when DisplayFamily_DisplayModel is one of:
	 * 06_0x, 06_17, 06_1C
	 *
	 * Description says this bit enables use of WBINVD and FLUSH#.
	 * Should this be set only after the system bus and/or memory
	 * controller can successfully handle write cycles?
	 */

#define EAX_FAMILY(a)	(a << 8)	/* for family <= 0fH */
#define EAX_MODEL(a)	(((a & 0xf0) << 12) | ((a & 0xf) << 4))

	movl	$1, %eax
	cpuid
	movl	%eax, %ebx
	andl	$EAX_FAMILY(0x0f), %eax
	cmpl	$EAX_FAMILY(0x06), %eax
	jne	no_msr_11e
	movl	%ebx, %eax
	andl	$EAX_MODEL(0xff), %eax
	cmpl	$EAX_MODEL(0x17), %eax
	je	has_msr_11e
	cmpl	$EAX_MODEL(0x1c), %eax
	je	has_msr_11e
	andl	$EAX_MODEL(0xf0), %eax
	cmpl	$EAX_MODEL(0x00), %eax
	jne	no_msr_11e
has_msr_11e:
	movl	$0x11e, %ecx
	rdmsr
	orl	$(1 << 8), %eax
	wrmsr
no_msr_11e:
Martin Rothc87ab012022-11-20 19:32:51 -0700274 post_code(POST_SOC_ENABLE_CACHE)
Kyösti Mälkki6a8ce0d2018-05-17 17:22:51 +0300275
Arthur Heymans95b3ba52019-01-09 12:24:58 +0100276 /* Cache the whole rom to fetch microcode updates */
277 movl $MTRR_PHYS_BASE(1), %ecx
278 xorl %edx, %edx
Arthur Heymansbc7b63f2021-07-02 10:03:17 +0200279 movl rom_mtrr_base, %eax
Kyösti Mälkkidc6bb6c2019-11-08 00:08:55 +0200280 orl $MTRR_TYPE_WRPROT, %eax
Arthur Heymans95b3ba52019-01-09 12:24:58 +0100281 wrmsr
Arthur Heymans95b3ba52019-01-09 12:24:58 +0100282 movl $MTRR_PHYS_MASK(1), %ecx
283 rdmsr
Arthur Heymansbc7b63f2021-07-02 10:03:17 +0200284 movl rom_mtrr_mask, %eax
Kyösti Mälkkidc6bb6c2019-11-08 00:08:55 +0200285 orl $MTRR_PHYS_MASK_VALID, %eax
Arthur Heymans95b3ba52019-01-09 12:24:58 +0100286 wrmsr
287
Kyösti Mälkki6a8ce0d2018-05-17 17:22:51 +0300288 /* Enable cache (CR0.CD = 0, CR0.NW = 0). */
289 movl %cr0, %eax
290 andl $(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
291 invd
292 movl %eax, %cr0
293
Julius Wernercd49cce2019-03-05 16:53:33 -0800294#if CONFIG(MICROCODE_UPDATE_PRE_RAM)
Arthur Heymans95b3ba52019-01-09 12:24:58 +0100295 update_microcode:
296 /* put the return address in %esp */
297 movl $end_microcode_update, %esp
298 jmp update_bsp_microcode
299 end_microcode_update:
300#endif
Martin Rothc87ab012022-11-20 19:32:51 -0700301 post_code(POST_SOC_DISABLE_CACHE)
Arthur Heymans95b3ba52019-01-09 12:24:58 +0100302 /* Disable caching to change MTRR's. */
Kyösti Mälkki6a8ce0d2018-05-17 17:22:51 +0300303 movl %cr0, %eax
304 orl $CR0_CacheDisable, %eax
305 movl %eax, %cr0
306
Arthur Heymans7875dbd2018-06-16 20:01:47 +0200307 /*
308 * An unidentified combination of speculative reads and branch
309 * predictions inside WRPROT-cacheable memory can cause invalidation
310 * of cachelines and loss of stack on models based on NetBurst
311 * microarchitecture. Therefore disable WRPROT region entirely for
312 * all family F models.
313 */
314 movl $1, %eax
315 cpuid
316 cmp $0xf, %ah
Arthur Heymans95b3ba52019-01-09 12:24:58 +0100317 jne cache_rom
Arthur Heymans7875dbd2018-06-16 20:01:47 +0200318
Arthur Heymans95b3ba52019-01-09 12:24:58 +0100319disable_cache_rom:
320 movl $MTRR_PHYS_MASK(1), %ecx
321 rdmsr
322 andl $(~MTRR_PHYS_MASK_VALID), %eax
323 wrmsr
324 jmp fill_cache
325
326cache_rom:
Kyösti Mälkki6a8ce0d2018-05-17 17:22:51 +0300327 /* Enable cache for our code in Flash because we do XIP here */
328 movl $MTRR_PHYS_BASE(1), %ecx
329 xorl %edx, %edx
Kyösti Mälkkice9f4222018-06-25 18:53:36 +0300330 movl $_program, %eax
Arthur Heymansbc7b63f2021-07-02 10:03:17 +0200331 andl xip_mtrr_mask, %eax
Kyösti Mälkki6a8ce0d2018-05-17 17:22:51 +0300332 orl $MTRR_TYPE_WRPROT, %eax
333 wrmsr
Kyösti Mälkki6a8ce0d2018-05-17 17:22:51 +0300334 movl $MTRR_PHYS_MASK(1), %ecx
335 rdmsr
Arthur Heymansbc7b63f2021-07-02 10:03:17 +0200336 movl xip_mtrr_mask, %eax
Kyösti Mälkkidc6bb6c2019-11-08 00:08:55 +0200337 orl $MTRR_PHYS_MASK_VALID, %eax
Kyösti Mälkki6a8ce0d2018-05-17 17:22:51 +0300338 wrmsr
339
fill_cache:
	post_code(POST_SOC_FILL_CACHE)
	/* Enable cache. */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	invd
	movl	%eax, %cr0

	/* Clear the cache memory region. This will also fill up the cache. */
	cld
	xorl	%eax, %eax
	movl	$_car_mtrr_start, %edi
	movl	$_car_mtrr_size, %ecx
	shr	$2, %ecx		/* stosl writes 4 bytes at a time */
	rep	stosl

	/* Setup the stack. */
	mov	$_ecar_stack, %esp

	/* Need to align stack to 16 bytes at call instruction. Account for
	   the pushes below. */
	andl	$0xfffffff0, %esp
	subl	$4, %esp

#if ENV_X86_64
	#include <cpu/x86/64bit/entry64.inc>

	/* Pack BIST and TSC (saved in MMX regs by earlier reset code)
	 * into the SysV argument registers.
	 */
	movd	%mm2, %rdi
	shlq	$32, %rdi	/* BIST */
	movd	%mm1, %rsi
	or	%rsi, %rdi	/* tsc[63:32] */
	movd	%mm0, %rsi	/* tsc[31:0] */

#else
	/* push TSC and BIST to stack */
	movd	%mm0, %eax
	pushl	%eax	/* BIST */
	movd	%mm2, %eax
	pushl	%eax	/* tsc[63:32] */
	movd	%mm1, %eax
	pushl	%eax	/* tsc[31:0] */
#endif

before_c_entry:
	post_code(POST_BOOTBLOCK_BEFORE_C_ENTRY)
	call	bootblock_c_entry_bist

	/* Should never see this postcode */
	post_code(POSTCODE_DEAD_CODE)

.Lhlt:
	hlt
	jmp	.Lhlt

/* MSR numbers of the fixed-range MTRRs cleared by clear_fixed_mtrr,
 * consumed as 16-bit entries (movzwl) terminated by fixed_mtrr_list_end.
 */
fixed_mtrr_list:
	.word	MTRR_FIX_64K_00000
	.word	MTRR_FIX_16K_80000
	.word	MTRR_FIX_16K_A0000
	.word	MTRR_FIX_4K_C0000
	.word	MTRR_FIX_4K_C8000
	.word	MTRR_FIX_4K_D0000
	.word	MTRR_FIX_4K_D8000
	.word	MTRR_FIX_4K_E0000
	.word	MTRR_FIX_4K_E8000
	.word	MTRR_FIX_4K_F0000
	.word	MTRR_FIX_4K_F8000
fixed_mtrr_list_end:

_cache_as_ram_setup_end: