/* SPDX-License-Identifier: GPL-2.0-only */

#include <cpu/x86/mtrr.h>
#include <cpu/x86/cache.h>
#include <cpu/x86/post_code.h>

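/*
 * NoEvictMod_MSR (0x2e0) controls cache no-eviction mode: bit 0 arms the
 * setup phase and bit 1 the 'run' state, as used below. BBL_CR_CTL3
 * (0x11e) carries the L2 cache enable bit on older socketed CPUs.
 */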
#define NoEvictMod_MSR 0x2e0
#define BBL_CR_CTL3_MSR 0x11e

.global bootblock_pre_c_entry

#include <cpu/intel/car/cache_as_ram_symbols.inc>

.code32
_cache_as_ram_setup:

bootblock_pre_c_entry:
	movl	$cache_as_ram, %esp /* return address */
	jmp	check_mtrr /* Check if CPU properly reset */

cache_as_ram:
	post_code(0x20)

	/* Send INIT IPI to all excluding ourself. */
	movl	$0x000C4500, %eax
	movl	$0xFEE00300, %esi
	movl	%eax, (%esi)
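	/*
	 * 0xFEE00300 is the ICR in the local APIC's default MMIO window;
	 * 0x000C4500 selects destination shorthand 'all excluding self',
	 * delivery mode INIT, level assert. Bit 12, polled below, is the
	 * delivery status bit.
	 */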

	/* All CPUs need to be in Wait for SIPI state */
wait_for_sipi:
	movl	(%esi), %eax
	bt	$12, %eax
	jc	wait_for_sipi

	post_code(0x21)
	/* Clean-up MTRR_DEF_TYPE_MSR. */
	movl	$MTRR_DEF_TYPE_MSR, %ecx
	xorl	%eax, %eax
	xorl	%edx, %edx
	wrmsr

	post_code(0x22)
	/* Clear/disable fixed MTRRs */
	mov	$fixed_mtrr_list_size, %ebx
	xor	%eax, %eax
	xor	%edx, %edx

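	/*
	 * Walk fixed_mtrr_list backwards, two bytes (one .word entry) at a
	 * time; 'add $-2' sets ZF when %ebx reaches zero, terminating the
	 * loop. %eax/%edx stay zero, so each wrmsr clears an MTRR.
	 */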
clear_fixed_mtrr:
	add	$-2, %ebx
	movzwl	fixed_mtrr_list(%ebx), %ecx
	wrmsr
	jnz	clear_fixed_mtrr

	/* Zero out all variable range MTRRs. */
	movl	$MTRR_CAP_MSR, %ecx
	rdmsr
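	/*
	 * MTRR_CAP_MSR[7:0] is VCNT, the number of variable MTRR pairs;
	 * doubling it counts the base+mask MSRs, which start at 0x200.
	 */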
	andl	$0xff, %eax
	shl	$1, %eax
	movl	%eax, %edi
	movl	$0x200, %ecx
	xorl	%eax, %eax
	xorl	%edx, %edx
clear_var_mtrrs:
	wrmsr
	add	$1, %ecx
	dec	%edi
	jnz	clear_var_mtrrs

	/* Determine CPU_ADDR_BITS and load PHYSMASK high word to %edx. */
	movl	$0x80000008, %eax
	cpuid
	movb	%al, %cl
	sub	$32, %cl
	movl	$1, %edx
	shl	%cl, %edx
	subl	$1, %edx
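	/*
	 * %edx = (1 << (phys_addr_bits - 32)) - 1; e.g. 36 address bits
	 * yield 0xf, the high word required for a valid PHYSMASK.
	 */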

	/* Preload high word of address mask (in %edx) for Variable
	 * MTRRs 0 and 1.
	 */
addrsize_set_high:
	xorl	%eax, %eax
	movl	$MTRR_PHYS_MASK(0), %ecx
	wrmsr
	movl	$MTRR_PHYS_MASK(1), %ecx
	wrmsr

	post_code(0x23)
	/* Set Cache-as-RAM base address. */
	movl	$(MTRR_PHYS_BASE(0)), %ecx
	movl	car_mtrr_start, %eax
	orl	$MTRR_TYPE_WRBACK, %eax
	xorl	%edx, %edx
	wrmsr

	post_code(0x24)
	/* Set Cache-as-RAM mask. */
	movl	$(MTRR_PHYS_MASK(0)), %ecx
	rdmsr
	mov	car_mtrr_mask, %eax
	orl	$MTRR_PHYS_MASK_VALID, %eax
	wrmsr
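	/*
	 * car_mtrr_start/car_mtrr_mask and the rom_mtrr_* values below are
	 * provided by cache_as_ram_symbols.inc, included above.
	 */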

	/* Enable caching of the code in flash, since we execute in place
	 * (XIP) here.
	 */
	movl	$MTRR_PHYS_BASE(1), %ecx
	xorl	%edx, %edx
	mov	rom_mtrr_base, %eax
	orl	$MTRR_TYPE_WRPROT, %eax
	wrmsr

	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
	mov	rom_mtrr_mask, %eax
	orl	$MTRR_PHYS_MASK_VALID, %eax
	wrmsr
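	/*
	 * Write-protect caching lets reads from flash allocate in the
	 * cache while writes are never cached, matching read-only ROM.
	 */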

	post_code(0x25)

	/* Enable MTRR. */
	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
	orl	$MTRR_DEF_TYPE_EN, %eax
	wrmsr

#if CONFIG(CPU_HAS_L2_ENABLE_MSR)
	/*
	 * Enable the L2 cache. Currently this assumes that this only
	 * affects socketed CPUs, for which it is always valid, hence
	 * the static preprocessor check.
	 */
	movl	$BBL_CR_CTL3_MSR, %ecx
	rdmsr
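	/* Bit 8 of BBL_CR_CTL3 is the L2 cache enable bit on these parts. */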
	orl	$0x100, %eax
	wrmsr
#endif

	/* Enable cache (CR0.CD = 0, CR0.NW = 0). */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	invd
	movl	%eax, %cr0
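	/*
	 * invd rather than wbinvd: nothing useful is cached yet, so stale
	 * lines can be dropped without writing them back.
	 */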

#if CONFIG(MICROCODE_UPDATE_PRE_RAM)
update_microcode:
	/* put the return address in %esp */
	movl	$end_microcode_update, %esp
	jmp	update_bsp_microcode
end_microcode_update:
#endif
	/* Disable caching to change MTRRs. */
	movl	%cr0, %eax
	orl	$CR0_CacheDisable, %eax
	movl	%eax, %cr0

	/* Clear the mask valid bit to disable the MTRR */
	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
	andl	$(~MTRR_PHYS_MASK_VALID), %eax
	wrmsr

	/* Enable cache. */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	invd
	movl	%eax, %cr0

	/* Enable the 'no eviction' mode setup. */
	movl	$NoEvictMod_MSR, %ecx
	rdmsr
	orl	$1, %eax
	andl	$~2, %eax
	wrmsr
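	/*
	 * Bit 0 arms no-eviction setup; bit 1 (the 'run' bit) stays clear
	 * until the CAR region has been filled below.
	 */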

	/* Clear the cache memory region. This will also fill up the cache. */
	cld
	xorl	%eax, %eax
	movl	$_car_mtrr_start, %edi
	movl	$_car_mtrr_size, %ecx
	shr	$2, %ecx
	rep	stosl
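	/*
	 * stosl stores one dword per iteration, hence _car_mtrr_size is
	 * shifted right by 2; with no-eviction setup armed, these stores
	 * populate the cache lines that will back the CAR region.
	 */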

	/* Enable the 'no eviction run' state. */
	movl	$NoEvictMod_MSR, %ecx
	rdmsr
	orl	$3, %eax
	wrmsr

	post_code(0x26)
	/* Enable Cache-as-RAM mode by disabling cache. */
	movl	%cr0, %eax
	orl	$CR0_CacheDisable, %eax
	movl	%eax, %cr0

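	/* Re-validate the XIP flash MTRR that was disabled above. */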
	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
	orl	$MTRR_PHYS_MASK_VALID, %eax
	wrmsr

	post_code(0x28)
	/* Enable cache. */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	movl	%eax, %cr0

	/* Set up the stack. */
	mov	$_ecar_stack, %esp

	/* Need to align the stack to 16 bytes at the call instruction.
	 * Account for the pushes below.
	 */
	andl	$0xfffffff0, %esp

#if ENV_X86_64

	#include <cpu/x86/64bit/entry64.inc>

	movd	%mm2, %rdi
	shlq	$32, %rdi
	movd	%mm1, %rsi
	or	%rsi, %rdi
	movd	%mm0, %rsi
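	/*
	 * %rdi = 64-bit TSC assembled from %mm2:%mm1 and %rsi = BIST from
	 * %mm0, the arguments for bootblock_c_entry_bist() below.
	 */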

#else
	subl	$4, %esp
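	/*
	 * 4 bytes of padding plus the three 4-byte pushes below keep %esp
	 * 16-byte aligned at the call.
	 */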
	/* push TSC and BIST to stack */
	movd	%mm0, %eax
	pushl	%eax	/* BIST */
	movd	%mm2, %eax
	pushl	%eax	/* tsc[63:32] */
	movd	%mm1, %eax
	pushl	%eax	/* tsc[31:0] */
#endif

before_c_entry:
	post_code(0x29)
	call	bootblock_c_entry_bist

	/* Should never see this postcode */
	post_code(POST_DEAD_CODE)

.Lhlt:
	hlt
	jmp	.Lhlt

fixed_mtrr_list:
	.word	MTRR_FIX_64K_00000
	.word	MTRR_FIX_16K_80000
	.word	MTRR_FIX_16K_A0000
	.word	MTRR_FIX_4K_C0000
	.word	MTRR_FIX_4K_C8000
	.word	MTRR_FIX_4K_D0000
	.word	MTRR_FIX_4K_D8000
	.word	MTRR_FIX_4K_E0000
	.word	MTRR_FIX_4K_E8000
	.word	MTRR_FIX_4K_F0000
	.word	MTRR_FIX_4K_F8000
fixed_mtrr_list_size = . - fixed_mtrr_list

_cache_as_ram_setup_end: