/* SPDX-License-Identifier: GPL-2.0-only */

#include <cpu/x86/mtrr.h>
#include <cpu/x86/cache.h>
#include <cpu/x86/post_code.h>
#include <cpu/x86/lapic_def.h>

/* Macro to access Local APIC registers at default base. */
#define LAPIC(x) $(LAPIC_DEFAULT_BASE | LAPIC_ ## x)

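/*
 * Cache-as-RAM (CAR) setup: configure an MTRR-covered region of the CPU
 * cache as temporary RAM so the bootblock has a stack and data area before
 * DRAM is initialized. The BSP also wakes sibling threads/cores so their
 * shared cache is not left disabled.
 */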
.section .init
.global bootblock_pre_c_entry

.code32
_cache_as_ram_setup:

bootblock_pre_c_entry:

cache_as_ram:
	post_code(0x20)

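	/* Read IA32_APIC_BASE; only the BSP continues with CAR setup, while
	   APs branch to ap_init near the end of this file. */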
	movl $LAPIC_BASE_MSR, %ecx
	rdmsr
	andl $LAPIC_BASE_MSR_BOOTSTRAP_PROCESSOR, %eax
	jz ap_init

	/* Clear/disable fixed MTRRs */
	mov $fixed_mtrr_list_size, %ebx
	xor %eax, %eax
	xor %edx, %edx

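	/* Walk fixed_mtrr_list from the end, writing 0 to each listed MSR.
	   The 'add $-2' sets ZF when the list index reaches zero, which ends
	   the loop. */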
clear_fixed_mtrr:
	add $-2, %ebx
	movzwl fixed_mtrr_list(%ebx), %ecx
	wrmsr
	jnz clear_fixed_mtrr

	/* Figure out how many MTRRs we have, and clear them out */
	mov $MTRR_CAP_MSR, %ecx
	rdmsr
	movzb %al, %ebx		/* Number of variable MTRRs */
	mov $MTRR_PHYS_BASE(0), %ecx
	xor %eax, %eax
	xor %edx, %edx

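	/* Each variable MTRR is a BASE/MASK MSR pair, so two consecutive
	   MSRs are zeroed per iteration. */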
clear_var_mtrr:
	wrmsr
	inc %ecx
	wrmsr
	inc %ecx
	dec %ebx
	jnz clear_var_mtrr
	post_code(0x21)

	/* Configure the default memory type to uncacheable. */
	movl $MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
	andl $(~0x00000cff), %eax
	wrmsr

	post_code(0x22)

	/* Determine CPU_ADDR_BITS and load PHYSMASK high
	 * word to %edx.
	 */
	movl $0x80000000, %eax
	cpuid
	cmpl $0x80000008, %eax
	jc addrsize_no_MSR
	movl $0x80000008, %eax
	cpuid
	movb %al, %cl
	sub $32, %cl
	movl $1, %edx
	shl %cl, %edx
	subl $1, %edx
	jmp addrsize_set_high
addrsize_no_MSR:
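	/* No extended CPUID leaf 0x80000008: assume 36-bit physical addressing
	   when PAE or PSE36 is supported, otherwise 32-bit (high mask stays 0). */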
	movl $1, %eax
	cpuid
	andl $(1 << 6 | 1 << 17), %edx /* PAE or PSE36 */
	jz addrsize_set_high
	movl $0x0f, %edx

	/* Preload high word of address mask (in %edx) for Variable
	 * MTRRs 0 and 1 and enable local APIC at default base.
	 */
addrsize_set_high:
	xorl %eax, %eax
	movl $MTRR_PHYS_MASK(0), %ecx
	wrmsr
	movl $MTRR_PHYS_MASK(1), %ecx
	wrmsr
	movl $LAPIC_BASE_MSR, %ecx
	not %edx
	movl %edx, %ebx
	rdmsr
	andl %ebx, %edx
	andl $(~LAPIC_BASE_MSR_ADDR_MASK), %eax
	orl $(LAPIC_DEFAULT_BASE | LAPIC_BASE_MSR_ENABLE), %eax
	wrmsr

bsp_init:

	post_code(0x23)

	/* Send INIT IPI to all excluding ourselves. */
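	/* Write the ICR, then poll the delivery-status (busy) bit and retry
	   the write until the IPI has been accepted. */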
	movl LAPIC(ICR), %edi
	movl $(LAPIC_DEST_ALLBUT | LAPIC_INT_ASSERT | LAPIC_DM_INIT), %eax
1:	movl %eax, (%edi)
	movl $0x30, %ecx
2:	pause
	dec %ecx
	jnz 2b
	movl (%edi), %ecx
	andl $LAPIC_ICR_BUSY, %ecx
	jnz 1b

	post_code(0x24)

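	/* CPUID leaf 1: EDX bit 28 indicates HTT support and EBX[23:16] holds
	   the number of logical processors per physical package. */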
	movl $1, %eax
	cpuid
	btl $28, %edx
	jnc sipi_complete
	bswapl %ebx
	movzx %bh, %edi
	cmpb $1, %bh
	jbe sipi_complete /* only one LAPIC ID in package */

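	/* Use CPUID leaf 4 (when available) to count cores per package, so
	   hyper-threading siblings can be told apart from separate cores. */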
	movl $0, %eax
	cpuid
	movb $1, %bl
	cmpl $4, %eax
	jb cores_counted
	movl $4, %eax
	movl $0, %ecx
	cpuid
	shr $26, %eax
	movb %al, %bl
	inc %bl

cores_counted:
	movl %edi, %eax
	divb %bl
	cmpb $1, %al
	jbe sipi_complete /* only LAPIC ID of a core */

	/* For a hyper-threading processor, cache must not be disabled
	 * on an AP on the same physical package with the BSP.
	 */

hyper_threading_cpu:

	post_code(0x25)

	/* Send Start IPI to all excluding ourselves. */
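	/* The SIPI vector in the low byte of the ICR value selects the 4 KiB
	   page (below 1 MiB) at which the APs start executing. */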
	movl LAPIC(ICR), %edi
	movl $(LAPIC_DEST_ALLBUT | LAPIC_DM_STARTUP), %eax
	orl $ap_sipi_vector_in_rom, %eax
1:	movl %eax, (%edi)
	movl $0x30, %ecx
2:	pause
	dec %ecx
	jnz 2b
	movl (%edi), %ecx
	andl $LAPIC_ICR_BUSY, %ecx
	jnz 1b

	post_code(0x26)

	/* Wait for sibling CPU to start. */
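	/* The AP signals readiness by writing a nonzero value to
	   MTRR_PHYS_BASE(0) in ap_init below; MTRRs are shared between
	   hyper-threading siblings. */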
1:	movl $(MTRR_PHYS_BASE(0)), %ecx
	rdmsr
	andl %eax, %eax
	jnz sipi_complete

	movl $0x30, %ecx
2:	pause
	dec %ecx
	jnz 2b
	jmp 1b


ap_init:
	post_code(0x27)

	/* Do not disable cache (so BSP can enable it). */
	movl %cr0, %eax
	andl $(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	movl %eax, %cr0

	post_code(0x28)

	/* MTRR registers are shared between HT siblings. */
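	/* Writing a nonzero base here also releases the BSP from its wait
	   loop above. */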
	movl $(MTRR_PHYS_BASE(0)), %ecx
	movl $(1 << 12), %eax
	xorl %edx, %edx
	wrmsr

	post_code(0x29)

ap_halt:
	cli
1:	hlt
	jmp 1b



sipi_complete:

	post_code(0x2a)

	/* Set Cache-as-RAM base address. */
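	/* _car_mtrr_start, _car_mtrr_mask and _car_mtrr_size are expected to
	   be symbols provided by the CAR linker script. */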
	movl $(MTRR_PHYS_BASE(0)), %ecx
	movl $_car_mtrr_start, %eax
	orl $MTRR_TYPE_WRBACK, %eax
	xorl %edx, %edx
	wrmsr

	/* Set Cache-as-RAM mask. */
	movl $(MTRR_PHYS_MASK(0)), %ecx
	rdmsr
	movl $_car_mtrr_mask, %eax
	orl $MTRR_PHYS_MASK_VALID, %eax
	wrmsr

	post_code(0x2b)

	/* Enable MTRR. */
	movl $MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
	orl $MTRR_DEF_TYPE_EN, %eax
	wrmsr

	/* Enable L2 cache Write-Back (WBINVD and FLUSH#).
	 *
	 * MSR is set when DisplayFamily_DisplayModel is one of:
	 * 06_0x, 06_17, 06_1C
	 *
	 * Description says this bit enables use of WBINVD and FLUSH#.
	 * Should this be set only after the system bus and/or memory
	 * controller can successfully handle write cycles?
	 */

#define EAX_FAMILY(a) (a << 8)	/* for family <= 0fH */
#define EAX_MODEL(a) (((a & 0xf0) << 12) | ((a & 0xf) << 4))

	movl $1, %eax
	cpuid
	movl %eax, %ebx
	andl $EAX_FAMILY(0x0f), %eax
	cmpl $EAX_FAMILY(0x06), %eax
	jne no_msr_11e
	movl %ebx, %eax
	andl $EAX_MODEL(0xff), %eax
	cmpl $EAX_MODEL(0x17), %eax
	je has_msr_11e
	cmpl $EAX_MODEL(0x1c), %eax
	je has_msr_11e
	andl $EAX_MODEL(0xf0), %eax
	cmpl $EAX_MODEL(0x00), %eax
	jne no_msr_11e
has_msr_11e:
	movl $0x11e, %ecx
	rdmsr
	orl $(1 << 8), %eax
	wrmsr
no_msr_11e:

	post_code(0x2c)

	/* Cache the whole rom to fetch microcode updates */
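	/* _rom_mtrr_base and _rom_mtrr_mask are expected to be linker-script
	   symbols describing the boot ROM region. */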
	movl $MTRR_PHYS_BASE(1), %ecx
	xorl %edx, %edx
	movl $_rom_mtrr_base, %eax
	orl $MTRR_TYPE_WRPROT, %eax
	wrmsr
	movl $MTRR_PHYS_MASK(1), %ecx
	rdmsr
	movl $_rom_mtrr_mask, %eax
	orl $MTRR_PHYS_MASK_VALID, %eax
	wrmsr

	/* Enable cache (CR0.CD = 0, CR0.NW = 0). */
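	/* invd (rather than wbinvd) is enough here: the cache holds nothing
	   worth writing back before CAR is active. */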
	movl %cr0, %eax
	andl $(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	invd
	movl %eax, %cr0

#if CONFIG(MICROCODE_UPDATE_PRE_RAM)
update_microcode:
	/* put the return address in %esp */
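	/* No stack exists yet, so the callee returns by jumping through %esp
	   instead of using 'ret'. */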
	movl $end_microcode_update, %esp
	jmp update_bsp_microcode
end_microcode_update:
#endif
	post_code(0x2d)
	/* Disable caching to change MTRRs. */
	movl %cr0, %eax
	orl $CR0_CacheDisable, %eax
	movl %eax, %cr0

	/*
	 * An unidentified combination of speculative reads and branch
	 * predictions inside WRPROT-cacheable memory can cause invalidation
	 * of cachelines and loss of stack on models based on NetBurst
	 * microarchitecture. Therefore disable WRPROT region entirely for
	 * all family F models.
	 */
	movl $1, %eax
	cpuid
	cmp $0xf, %ah
	jne cache_rom

disable_cache_rom:
	movl $MTRR_PHYS_MASK(1), %ecx
	rdmsr
	andl $(~MTRR_PHYS_MASK_VALID), %eax
	wrmsr
	jmp fill_cache

cache_rom:
	/* Enable cache for our code in Flash because we do XIP here */
	movl $MTRR_PHYS_BASE(1), %ecx
	xorl %edx, %edx
	movl $_program, %eax
	andl $_xip_mtrr_mask, %eax
	orl $MTRR_TYPE_WRPROT, %eax
	wrmsr
	movl $MTRR_PHYS_MASK(1), %ecx
	rdmsr
	movl $_xip_mtrr_mask, %eax
	orl $MTRR_PHYS_MASK_VALID, %eax
	wrmsr

fill_cache:
	post_code(0x2e)
	/* Enable cache. */
	movl %cr0, %eax
	andl $(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	invd
	movl %eax, %cr0

	/* Clear the cache memory region. This will also fill up the cache. */
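	/* _car_mtrr_size is in bytes; divide by four for the dword count used
	   by 'rep stosl'. */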
	cld
	xorl %eax, %eax
	movl $_car_mtrr_start, %edi
	movl $_car_mtrr_size, %ecx
	shr $2, %ecx
	rep stosl

	/* Setup the stack. */
	mov $_ecar_stack, %esp

	/* Need to align stack to 16 bytes at call instruction. Account for
	   the pushes below. */
	andl $0xfffffff0, %esp
	subl $4, %esp

	/* push TSC and BIST to stack */
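	/* BIST and the boot timestamp were presumably stashed in %mm0 and
	   %mm2:%mm1 by the earlier entry code, since no memory was usable
	   at that point. */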
	movd %mm0, %eax
	pushl %eax	/* BIST */
	movd %mm2, %eax
	pushl %eax	/* tsc[63:32] */
	movd %mm1, %eax
	pushl %eax	/* tsc[31:0] */

before_c_entry:
	post_code(0x2f)
	call bootblock_c_entry_bist

	/* Should never see this postcode */
	post_code(POST_DEAD_CODE)

.Lhlt:
	hlt
	jmp .Lhlt

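/* MSR addresses of the fixed-range MTRRs cleared at the top of this file. */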
fixed_mtrr_list:
	.word MTRR_FIX_64K_00000
	.word MTRR_FIX_16K_80000
	.word MTRR_FIX_16K_A0000
	.word MTRR_FIX_4K_C0000
	.word MTRR_FIX_4K_C8000
	.word MTRR_FIX_4K_D0000
	.word MTRR_FIX_4K_D8000
	.word MTRR_FIX_4K_E0000
	.word MTRR_FIX_4K_E8000
	.word MTRR_FIX_4K_F0000
	.word MTRR_FIX_4K_F8000
fixed_mtrr_list_size = . - fixed_mtrr_list

_cache_as_ram_setup_end: