blob: d08736585d077cd8242dfd0e463a32130b7e274c [file] [log] [blame]
/* SPDX-License-Identifier: GPL-2.0-only */

#include <cpu/x86/mtrr.h>
#include <cpu/x86/cache.h>
#include <cpu/x86/post_code.h>

/* No-Eviction Mode control MSR (0x2E0). As used below: bit 0 enables
 * NEM "setup" and bit 1 enables the NEM "run" state. */
#define NoEvictMod_MSR 0x2e0
/* L2 cache control MSR (0x11E) on socketed CPUs; bit 8 (0x100) is the
 * L2 enable used under CONFIG(CPU_HAS_L2_ENABLE_MSR) below. */
#define BBL_CR_CTL3_MSR 0x11e

.global bootblock_pre_c_entry
.code32
_cache_as_ram_setup:

/*
 * bootblock_pre_c_entry: set up Cache-as-RAM (CAR) so the bootblock can
 * run C code before DRAM is initialized.
 *
 * Entry state (no usable stack yet):
 *   %mm0 = BIST result, %mm1 = TSC[31:0], %mm2 = TSC[63:32]
 *   (demonstrated by the pushes before the C entry call below).
 * Exit: never returns here; tail-calls bootblock_c_entry_bist on the
 * freshly created CAR stack.
 *
 * check_mtrr is provided elsewhere in the build; it is expected to jump
 * back to the address loaded into %esp (cache_as_ram) — NOTE(review):
 * confirm against the file that defines check_mtrr.
 */
bootblock_pre_c_entry:
	movl	$cache_as_ram, %esp /* return address */
	jmp	check_mtrr /* Check if CPU properly reset */

cache_as_ram:
	post_code(0x20)

	/* Send an INIT IPI to all CPUs excluding ourselves, via the local
	 * APIC Interrupt Command Register (ICR low dword at 0xFEE00300).
	 * 0x000C4500 = all-excluding-self shorthand, INIT delivery mode. */
	movl	$0x000C4500, %eax
	movl	$0xFEE00300, %esi
	movl	%eax, (%esi)

	/* All CPUs need to be in Wait-for-SIPI state: spin until the ICR
	 * delivery-status bit (bit 12) clears, i.e. the IPI was sent. */
wait_for_sipi:
	movl	(%esi), %eax
	bt	$12, %eax
	jc	wait_for_sipi

	post_code(0x21)
	/* Clean up MTRR_DEF_TYPE_MSR: disable all MTRRs, default type UC. */
	movl	$MTRR_DEF_TYPE_MSR, %ecx
	xorl	%eax, %eax
	xorl	%edx, %edx
	wrmsr

	post_code(0x22)
	/* Clear/disable fixed MTRRs. Walk fixed_mtrr_list back-to-front,
	 * two bytes (one MSR number) per step, writing 0 to each. */
	mov	$fixed_mtrr_list_size, %ebx
	xor	%eax, %eax
	xor	%edx, %edx

clear_fixed_mtrr:
	add	$-2, %ebx
	movzwl	fixed_mtrr_list(%ebx), %ecx
	wrmsr
	/* jnz consumes ZF from the "add $-2" above — neither movzwl nor
	 * wrmsr modifies EFLAGS — so the loop ends when %ebx hits 0. */
	jnz	clear_fixed_mtrr

	/* Zero out all variable range MTRRs. VCNT lives in the low byte of
	 * MTRR_CAP_MSR; each variable MTRR is a BASE/MASK pair, hence the
	 * doubling. Variable MTRR MSRs start at 0x200. */
	movl	$MTRR_CAP_MSR, %ecx
	rdmsr
	andl	$0xff, %eax
	shl	$1, %eax
	movl	%eax, %edi
	movl	$0x200, %ecx
	xorl	%eax, %eax
	xorl	%edx, %edx
clear_var_mtrrs:
	wrmsr
	add	$1, %ecx
	dec	%edi
	jnz	clear_var_mtrrs

	/* Determine CPU_ADDR_BITS (CPUID 0x80000008, %al = physical address
	 * bits) and load the PHYSMASK high word into %edx:
	 * %edx = (1 << (addr_bits - 32)) - 1, the valid mask bits above 4GiB. */
	movl	$0x80000008, %eax
	cpuid
	movb	%al, %cl
	sub	$32, %cl
	movl	$1, %edx
	shl	%cl, %edx
	subl	$1, %edx

	/* Preload high word of address mask (in %edx) for Variable
	 * MTRRs 0 and 1. The low dword (%eax) is zero for now; the masks
	 * are filled in and validated further down.
	 */
addrsize_set_high:
	xorl	%eax, %eax
	movl	$MTRR_PHYS_MASK(0), %ecx
	wrmsr
	movl	$MTRR_PHYS_MASK(1), %ecx
	wrmsr


	post_code(0x23)
	/* Set Cache-as-RAM base address (linker-provided _car_mtrr_start),
	 * cached write-back, in variable MTRR 0. */
	movl	$(MTRR_PHYS_BASE(0)), %ecx
	movl	$_car_mtrr_start, %eax
	orl	$MTRR_TYPE_WRBACK, %eax
	xorl	%edx, %edx
	wrmsr

	post_code(0x24)
	/* Set Cache-as-RAM mask. rdmsr first so the preloaded high word in
	 * %edx is preserved; only the low dword is replaced. */
	movl	$(MTRR_PHYS_MASK(0)), %ecx
	rdmsr
	movl	$_car_mtrr_mask, %eax
	orl	$MTRR_PHYS_MASK_VALID, %eax
	wrmsr

	/* Enable caching of our code in flash (variable MTRR 1, write-
	 * protect type) because we execute in place (XIP) here. */
	movl	$MTRR_PHYS_BASE(1), %ecx
	xorl	%edx, %edx
	movl	$_rom_mtrr_base, %eax
	orl	$MTRR_TYPE_WRPROT, %eax
	wrmsr

	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
	movl	$_rom_mtrr_mask, %eax
	orl	$MTRR_PHYS_MASK_VALID, %eax
	wrmsr

	post_code(0x25)

	/* Enable MTRRs (default-type MSR enable bit). */
	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
	orl	$MTRR_DEF_TYPE_EN, %eax
	wrmsr

#if CONFIG(CPU_HAS_L2_ENABLE_MSR)
	/*
	 * Enable the L2 cache. Currently this assumes that this
	 * only affects socketed CPUs, for which this is always valid,
	 * hence the static preprocessor guard.
	 */
	movl	$BBL_CR_CTL3_MSR, %ecx
	rdmsr
	orl	$0x100, %eax
	wrmsr
#endif

	/* Enable cache (CR0.CD = 0, CR0.NW = 0); invd first so no stale
	 * (pre-MTRR) lines can be written back. */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	invd
	movl	%eax, %cr0

#if CONFIG(MICROCODE_UPDATE_PRE_RAM)
update_microcode:
	/* No stack yet: put the return address in %esp; update_bsp_microcode
	 * is expected to jump back through it (same convention as check_mtrr). */
	movl	$end_microcode_update, %esp
	jmp	update_bsp_microcode
end_microcode_update:
#endif
	/* Disable caching to safely change MTRRs. */
	movl	%cr0, %eax
	orl	$CR0_CacheDisable, %eax
	movl	%eax, %cr0

	/* Clear the mask-valid bit to disable the XIP/flash MTRR while the
	 * CAR region is filled; it is re-validated after NEM run is on. */
	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
	andl	$(~MTRR_PHYS_MASK_VALID), %eax
	wrmsr

	/* Enable cache. */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	invd
	movl	%eax, %cr0

	/* Enable 'no eviction' setup mode (bit 0 set, bit 1 clear). */
	movl	$NoEvictMod_MSR, %ecx
	rdmsr
	orl	$1, %eax
	andl	$~2, %eax
	wrmsr

	/* Zero the CAR memory region, one dword at a time. This also pulls
	 * every line of the region into the cache. */
	cld
	xorl	%eax, %eax
	movl	$_car_mtrr_start, %edi
	movl	$_car_mtrr_size, %ecx
	shr	$2, %ecx
	rep	stosl

	/* Enable the 'no eviction run' state (bits 0 and 1 set): cached CAR
	 * lines are now locked in and will not be evicted. */
	movl	$NoEvictMod_MSR, %ecx
	rdmsr
	orl	$3, %eax
	wrmsr

	post_code(0x26)
	/* Enter Cache-as-RAM mode by disabling the cache (CD=1; with NEM
	 * run set, the CAR lines keep servicing reads/writes). */
	movl	%cr0, %eax
	orl	$CR0_CacheDisable, %eax
	movl	%eax, %cr0

	/* Re-validate the XIP/flash MTRR disabled above. */
	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
	orl	$MTRR_PHYS_MASK_VALID, %eax
	wrmsr

	post_code(0x28)
	/* Enable cache. */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	movl	%eax, %cr0

	/* Set up the stack at the top of the CAR region (linker symbol). */
	mov	$_ecar_stack, %esp

	/* Need to align stack to 16 bytes at the call instruction. Account
	   for the three 4-byte pushes below plus the return address. */
	andl	$0xfffffff0, %esp
	subl	$4, %esp

	/* Push TSC and BIST to the stack: these become the arguments of
	 * bootblock_c_entry_bist(uint64_t tsc, uint32_t bist). */
	movd	%mm0, %eax
	pushl	%eax /* BIST */
	movd	%mm2, %eax
	pushl	%eax /* tsc[63:32] */
	movd	%mm1, %eax
	pushl	%eax /* tsc[31:0] */

before_c_entry:
	post_code(0x29)
	call	bootblock_c_entry_bist

	/* Should never see this postcode: the C entry must not return. */
	post_code(POST_DEAD_CODE)


.Lhlt:
	hlt
	jmp	.Lhlt
231
/* MSR numbers of all fixed-range MTRRs. Consumed by clear_fixed_mtrr
 * above, which walks the table back-to-front two bytes at a time, so the
 * entries must remain contiguous .word values. */
fixed_mtrr_list:
	.word	MTRR_FIX_64K_00000
	.word	MTRR_FIX_16K_80000
	.word	MTRR_FIX_16K_A0000
	.word	MTRR_FIX_4K_C0000
	.word	MTRR_FIX_4K_C8000
	.word	MTRR_FIX_4K_D0000
	.word	MTRR_FIX_4K_D8000
	.word	MTRR_FIX_4K_E0000
	.word	MTRR_FIX_4K_E8000
	.word	MTRR_FIX_4K_F0000
	.word	MTRR_FIX_4K_F8000
/* Table size in bytes, used as the loop counter's start value. */
fixed_mtrr_list_size = . - fixed_mtrr_list

_cache_as_ram_setup_end: