/* SPDX-License-Identifier: GPL-2.0-only */

#include <cpu/x86/mtrr.h>
#include <cpu/x86/cache.h>
#include <cpu/x86/post_code.h>
#include <cpu/x86/lapic_def.h>

/* Macro to access Local APIC registers at default base. */
#define LAPIC(x) $(LAPIC_DEFAULT_BASE | LAPIC_ ## x)

#define CACHE_AS_RAM_SIZE CONFIG_DCACHE_RAM_SIZE
#define CACHE_AS_RAM_BASE CONFIG_DCACHE_RAM_BASE

#if ((CONFIG_C_ENV_BOOTBLOCK_SIZE & (CONFIG_C_ENV_BOOTBLOCK_SIZE - 1)) != 0)
#error "CONFIG_C_ENV_BOOTBLOCK_SIZE must be a power of 2!"
#endif
#define XIP_ROM_SIZE CONFIG_C_ENV_BOOTBLOCK_SIZE

.global bootblock_pre_c_entry

.code32
_cache_as_ram_setup:

bootblock_pre_c_entry:

cache_as_ram:
	post_code(0x20)

	movl	$LAPIC_BASE_MSR, %ecx
	rdmsr
	andl	$LAPIC_BASE_MSR_BOOTSTRAP_PROCESSOR, %eax
	jz	ap_init

	/* Clear/disable fixed MTRRs */
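	/*
	 * The loop below walks fixed_mtrr_list backwards, two bytes (one
	 * MSR address) at a time, writing 0 to each listed MSR.  %eax and
	 * %edx are cleared once up front; movzwl and wrmsr leave the flags
	 * untouched, so the jnz still tests the result of "add $-2, %ebx".
	 */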
	mov	$fixed_mtrr_list_size, %ebx
	xor	%eax, %eax
	xor	%edx, %edx

clear_fixed_mtrr:
	add	$-2, %ebx
	movzwl	fixed_mtrr_list(%ebx), %ecx
	wrmsr
	jnz	clear_fixed_mtrr

	/* Figure out how many MTRRs we have, and clear them out */
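	/*
	 * MTRR_CAP_MSR[7:0] (VCNT) reports the number of variable MTRR
	 * pairs; the loop below clears both the PHYS_BASE and PHYS_MASK
	 * MSR of each pair.
	 */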
	mov	$MTRR_CAP_MSR, %ecx
	rdmsr
	movzb	%al, %ebx		/* Number of variable MTRRs */
	mov	$MTRR_PHYS_BASE(0), %ecx
	xor	%eax, %eax
	xor	%edx, %edx

clear_var_mtrr:
	wrmsr
	inc	%ecx
	wrmsr
	inc	%ecx
	dec	%ebx
	jnz	clear_var_mtrr
	post_code(0x21)

	/* Configure the default memory type to uncacheable. */
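	/*
	 * Clearing bits 11:10 (E, FE) and 7:0 (default type) of
	 * MTRR_DEF_TYPE_MSR leaves the MTRRs disabled with a default
	 * memory type of uncacheable (UC).
	 */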
	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
	andl	$(~0x00000cff), %eax
	wrmsr

	post_code(0x22)

	/* Determine CPU_ADDR_BITS and load PHYSMASK high
	 * word to %edx.
	 */
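	/*
	 * Example: a CPU reporting 36 physical address bits in CPUID
	 * 0x80000008[7:0] gives %cl = 36 - 32 = 4, so %edx becomes
	 * (1 << 4) - 1 = 0xf, the valid PHYSMASK bits above 4GiB.
	 */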
	movl	$0x80000000, %eax
	cpuid
	cmpl	$0x80000008, %eax
	jc	addrsize_no_MSR
	movl	$0x80000008, %eax
	cpuid
	movb	%al, %cl
	sub	$32, %cl
	movl	$1, %edx
	shl	%cl, %edx
	subl	$1, %edx
	jmp	addrsize_set_high
addrsize_no_MSR:
	movl	$1, %eax
	cpuid
	andl	$(1 << 6 | 1 << 17), %edx	/* PAE or PSE36 */
	jz	addrsize_set_high
	movl	$0x0f, %edx

	/* Preload high word of address mask (in %edx) for Variable
	 * MTRRs 0 and 1 and enable local APIC at default base.
	 */
addrsize_set_high:
	xorl	%eax, %eax
	movl	$MTRR_PHYS_MASK(0), %ecx
	wrmsr
	movl	$MTRR_PHYS_MASK(1), %ecx
	wrmsr
	movl	$LAPIC_BASE_MSR, %ecx
	not	%edx
	movl	%edx, %ebx
	rdmsr
	andl	%ebx, %edx
	andl	$(~LAPIC_BASE_MSR_ADDR_MASK), %eax
	orl	$(LAPIC_DEFAULT_BASE | LAPIC_BASE_MSR_ENABLE), %eax
	wrmsr

bsp_init:

	post_code(0x23)

	/* Send INIT IPI to all excluding ourself. */
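	/*
	 * Write the IPI command to the ICR, spin for ~0x30 pause
	 * iterations, then re-read the ICR and retry the write while the
	 * delivery status (busy) bit is still set.
	 */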
	movl	LAPIC(ICR), %edi
	movl	$(LAPIC_DEST_ALLBUT | LAPIC_INT_ASSERT | LAPIC_DM_INIT), %eax
1:	movl	%eax, (%edi)
	movl	$0x30, %ecx
2:	pause
	dec	%ecx
	jnz	2b
	movl	(%edi), %ecx
	andl	$LAPIC_ICR_BUSY, %ecx
	jnz	1b

	post_code(0x24)

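	/*
	 * CPUID leaf 1: EDX bit 28 is the HTT flag; EBX[23:16] is the
	 * number of addressable logical processors per physical package
	 * (isolated below via bswap into %bh).
	 */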
	movl	$1, %eax
	cpuid
	btl	$28, %edx
	jnc	sipi_complete
	bswapl	%ebx
	movzx	%bh, %edi
	cmpb	$1, %bh
	jbe	sipi_complete	/* only one LAPIC ID in package */

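	/*
	 * Count cores per package: default to 1, and if CPUID leaf 4 is
	 * supported use EAX[31:26] of leaf 4 (ECX=0), which holds the
	 * maximum core count minus one.  Logical CPUs divided by cores
	 * gives threads per core; with no HT siblings the SIPI below can
	 * be skipped.
	 */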
	movl	$0, %eax
	cpuid
	movb	$1, %bl
	cmpl	$4, %eax
	jb	cores_counted
	movl	$4, %eax
	movl	$0, %ecx
	cpuid
	shr	$26, %eax
	movb	%al, %bl
	inc	%bl

cores_counted:
	movl	%edi, %eax
	divb	%bl
	cmpb	$1, %al
	jbe	sipi_complete	/* only LAPIC ID of a core */

	/* For a hyper-threading processor, cache must not be disabled
	 * on an AP on the same physical package with the BSP.
	 */

hyper_threading_cpu:

	post_code(0x25)

	/* Send Start IPI to all excluding ourself. */
	movl	LAPIC(ICR), %edi
	movl	$(LAPIC_DEST_ALLBUT | LAPIC_DM_STARTUP), %eax
	orl	$ap_sipi_vector_in_rom, %eax
1:	movl	%eax, (%edi)
	movl	$0x30, %ecx
2:	pause
	dec	%ecx
	jnz	2b
	movl	(%edi), %ecx
	andl	$LAPIC_ICR_BUSY, %ecx
	jnz	1b

	post_code(0x26)

	/* Wait for sibling CPU to start. */
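	/*
	 * The AP signals that it is up by writing a non-zero value to
	 * MTRR_PHYS_BASE(0) (see ap_init below); MTRRs are shared between
	 * HT siblings, so the BSP sees that write here.
	 */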
1:	movl	$(MTRR_PHYS_BASE(0)), %ecx
	rdmsr
	andl	%eax, %eax
	jnz	sipi_complete

	movl	$0x30, %ecx
2:	pause
	dec	%ecx
	jnz	2b
	jmp	1b


ap_init:
	post_code(0x27)

	/* Do not disable cache (so BSP can enable it). */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	movl	%eax, %cr0

	post_code(0x28)

	/* MTRR registers are shared between HT siblings. */
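	/* Writing a non-zero placeholder value releases the BSP spinning
	 * in the wait loop above. */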
	movl	$(MTRR_PHYS_BASE(0)), %ecx
	movl	$(1 << 12), %eax
	xorl	%edx, %edx
	wrmsr

	post_code(0x29)

ap_halt:
	cli
1:	hlt
	jmp	1b



sipi_complete:

	post_code(0x2a)

	/* Set Cache-as-RAM base address. */
	movl	$(MTRR_PHYS_BASE(0)), %ecx
	movl	$(CACHE_AS_RAM_BASE | MTRR_TYPE_WRBACK), %eax
	xorl	%edx, %edx
	wrmsr

	/* Set Cache-as-RAM mask. */
	movl	$(MTRR_PHYS_MASK(0)), %ecx
	rdmsr
	movl	$(~(CACHE_AS_RAM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
	wrmsr
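	/*
	 * Example (assuming CONFIG_DCACHE_RAM_BASE 0xfef00000 and
	 * CONFIG_DCACHE_RAM_SIZE 0x4000): MTRR_PHYS_BASE(0) = 0xfef00006
	 * (write-back) and MTRR_PHYS_MASK(0) = 0xffffc000 | valid bit,
	 * with %edx still holding the address bits above 4GiB preloaded
	 * earlier.
	 */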

	post_code(0x2b)

	/* Enable MTRR. */
	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
	orl	$MTRR_DEF_TYPE_EN, %eax
	wrmsr

	/* Enable L2 cache Write-Back (WBINVD and FLUSH#).
	 *
	 * MSR is set when DisplayFamily_DisplayModel is one of:
	 * 06_0x, 06_17, 06_1C
	 *
	 * Description says this bit enables use of WBINVD and FLUSH#.
	 * Should this be set only after the system bus and/or memory
	 * controller can successfully handle write cycles?
	 */

#define EAX_FAMILY(a)	(a << 8)	/* for family <= 0fH */
#define EAX_MODEL(a)	(((a & 0xf0) << 12) | ((a & 0xf) << 4))
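	/*
	 * The macros above rebuild the CPUID leaf 1 EAX layout: family in
	 * bits 11:8, model in bits 7:4, extended model in bits 19:16.
	 * E.g. EAX_MODEL(0x17) = 0x10070, matching extended model 1,
	 * model 7 (DisplayModel 17h).
	 */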

	movl	$1, %eax
	cpuid
	movl	%eax, %ebx
	andl	$EAX_FAMILY(0x0f), %eax
	cmpl	$EAX_FAMILY(0x06), %eax
	jne	no_msr_11e
	movl	%ebx, %eax
	andl	$EAX_MODEL(0xff), %eax
	cmpl	$EAX_MODEL(0x17), %eax
	je	has_msr_11e
	cmpl	$EAX_MODEL(0x1c), %eax
	je	has_msr_11e
	andl	$EAX_MODEL(0xf0), %eax
	cmpl	$EAX_MODEL(0x00), %eax
	jne	no_msr_11e
has_msr_11e:
	movl	$0x11e, %ecx
	rdmsr
	orl	$(1 << 8), %eax
	wrmsr
no_msr_11e:

	post_code(0x2c)

	/* Cache the whole rom to fetch microcode updates */
	movl	$MTRR_PHYS_BASE(1), %ecx
	xorl	%edx, %edx
	movl	$(CACHE_ROM_BASE | MTRR_TYPE_WRPROT), %eax
	wrmsr

	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
	movl	$(~(CACHE_ROM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
	wrmsr

	/* Enable cache (CR0.CD = 0, CR0.NW = 0). */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	invd
	movl	%eax, %cr0

#if CONFIG(MICROCODE_UPDATE_PRE_RAM)
update_microcode:
	/* put the return address in %esp */
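	/*
	 * No writable stack exists yet, so update_bsp_microcode is
	 * expected to return by jumping to the address placed in %esp
	 * rather than with ret (convention assumed here; the routine is
	 * not part of this file).
	 */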
	movl	$end_microcode_update, %esp
	jmp	update_bsp_microcode
end_microcode_update:
#endif
	post_code(0x2d)
	/* Disable caching to change MTRR's. */
	movl	%cr0, %eax
	orl	$CR0_CacheDisable, %eax
	movl	%eax, %cr0

	/*
	 * An unidentified combination of speculative reads and branch
	 * predictions inside WRPROT-cacheable memory can cause invalidation
	 * of cachelines and loss of stack on models based on NetBurst
	 * microarchitecture. Therefore disable WRPROT region entirely for
	 * all family F models.
	 */
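	/*
	 * CPUID leaf 1: the family field is EAX[11:8], so after cpuid %ah
	 * holds the family (plus processor-type bits 13:12, normally
	 * zero); %ah == 0x0f therefore identifies family F (NetBurst).
	 */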
	movl	$1, %eax
	cpuid
	cmp	$0xf, %ah
	jne	cache_rom

disable_cache_rom:
	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
	andl	$(~MTRR_PHYS_MASK_VALID), %eax
	wrmsr
	jmp	fill_cache

cache_rom:
	/* Enable cache for our code in Flash because we do XIP here */
	movl	$MTRR_PHYS_BASE(1), %ecx
	xorl	%edx, %edx
	/*
	 * IMPORTANT: The following calculation _must_ be done at runtime. See
	 * https://mail.coreboot.org/pipermail/coreboot/2010-October/060922.html
	 */
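	/*
	 * $_program is the bootblock's runtime link address (provided by
	 * the linker script); aligning it down to XIP_ROM_SIZE gives the
	 * base of the XIP window this MTRR must cover.
	 */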
	movl	$_program, %eax
	andl	$(~(XIP_ROM_SIZE - 1)), %eax
	orl	$MTRR_TYPE_WRPROT, %eax
	wrmsr

	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
	movl	$(~(XIP_ROM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
	wrmsr

fill_cache:
	post_code(0x2e)
	/* Enable cache. */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	invd
	movl	%eax, %cr0

	/* Clear the cache memory region. This will also fill up the cache. */
	cld
	xorl	%eax, %eax
	movl	$CACHE_AS_RAM_BASE, %edi
	movl	$(CACHE_AS_RAM_SIZE >> 2), %ecx
	rep	stosl

	/* Setup the stack. */
	mov	$_ecar_stack, %esp

	/* Need to align stack to 16 bytes at call instruction. Account for
	   the pushes below. */
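	/*
	 * Three 4-byte pushes follow (BIST plus the 64-bit TSC), so
	 * aligning %esp down to 16 and then subtracting 4 leaves %esp
	 * 16-byte aligned again right at the call below.
	 */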
	andl	$0xfffffff0, %esp
	subl	$4, %esp

	/* push TSC and BIST to stack */
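	/*
	 * %mm0 is assumed to hold the BIST result and %mm2:%mm1 the TSC,
	 * saved by the earlier reset-vector/entry code (not in this file)
	 * before any general-purpose registers were clobbered.
	 */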
	movd	%mm0, %eax
	pushl	%eax	/* BIST */
	movd	%mm2, %eax
	pushl	%eax	/* tsc[63:32] */
	movd	%mm1, %eax
	pushl	%eax	/* tsc[31:0] */

before_c_entry:
	post_code(0x2f)
	call	bootblock_c_entry_bist

	/* Should never see this postcode */
	post_code(POST_DEAD_CODE)

.Lhlt:
	hlt
	jmp	.Lhlt

fixed_mtrr_list:
	.word	MTRR_FIX_64K_00000
	.word	MTRR_FIX_16K_80000
	.word	MTRR_FIX_16K_A0000
	.word	MTRR_FIX_4K_C0000
	.word	MTRR_FIX_4K_C8000
	.word	MTRR_FIX_4K_D0000
	.word	MTRR_FIX_4K_D8000
	.word	MTRR_FIX_4K_E0000
	.word	MTRR_FIX_4K_E8000
	.word	MTRR_FIX_4K_F0000
	.word	MTRR_FIX_4K_F8000
fixed_mtrr_list_size = . - fixed_mtrr_list

_cache_as_ram_setup_end: