/* SPDX-License-Identifier: GPL-2.0-only */

/*
 * Cache-as-RAM (CAR) setup for Intel CPUs using no-eviction mode (NEM).
 *
 * Entered from the reset path with no usable RAM. Configures the MTRRs so
 * that a region of cache can be used as temporary RAM for the early C code,
 * then tail-calls bootblock_c_entry_bist() with the saved TSC and BIST
 * values (passed in %mm0-%mm2 by earlier reset code).
 *
 * Syntax: GNU as, AT&T operand order, 32-bit protected mode (.code32).
 */

#include <cpu/intel/post_codes.h>
#include <cpu/x86/mtrr.h>
#include <cpu/x86/cache.h>
#include <cpu/x86/post_code.h>

#define NoEvictMod_MSR 0x2e0
#define BBL_CR_CTL3_MSR 0x11e

.section .init
.global bootblock_pre_c_entry

#include <cpu/intel/car/cache_as_ram_symbols.inc>

.code32
_cache_as_ram_setup:

bootblock_pre_c_entry:
	/* No stack exists yet; emulate "call" by preloading the return
	   address into %esp, which check_mtrr jumps back through. */
	movl	$cache_as_ram, %esp	/* return address */
	jmp	check_mtrr		/* Check if CPU properly reset */

cache_as_ram:
	post_code(POSTCODE_BOOTBLOCK_CAR)

	/* Send INIT IPI to all excluding ourself: write ICR (0xFEE00300)
	   with delivery mode INIT, destination shorthand "all but self". */
	movl	$0x000C4500, %eax
	movl	$0xFEE00300, %esi
	movl	%eax, (%esi)

	/* All CPUs need to be in Wait for SIPI state; poll the ICR
	   "delivery status" bit (bit 12) until the IPI is accepted. */
wait_for_sipi:
	movl	(%esi), %eax
	bt	$12, %eax
	jc	wait_for_sipi

	post_code(POST_SOC_SET_DEF_MTRR_TYPE)
	/* Clean-up MTRR_DEF_TYPE_MSR: default type UC, MTRRs disabled. */
	movl	$MTRR_DEF_TYPE_MSR, %ecx
	xorl	%eax, %eax
	xorl	%edx, %edx
	wrmsr

	post_code(POST_SOC_CLEAR_FIXED_MTRRS)
	/* Clear/disable fixed MTRRs. Walk the .word table of MSR numbers
	   at fixed_mtrr_list, writing 0:0 to each. */
	mov	$fixed_mtrr_list, %ebx
	xor	%eax, %eax
	xor	%edx, %edx

clear_fixed_mtrr:
	movzwl	(%ebx), %ecx		/* %ecx = next fixed-MTRR MSR number */
	wrmsr
	add	$2, %ebx		/* table entries are .word (2 bytes) */
	cmp	$fixed_mtrr_list_end, %ebx
	jl	clear_fixed_mtrr

	/* Zero out all variable range MTRRs. VCNT is in MTRR_CAP_MSR[7:0];
	   each variable MTRR is a base/mask MSR pair starting at 0x200. */
	movl	$MTRR_CAP_MSR, %ecx
	rdmsr
	andl	$0xff, %eax
	shl	$1, %eax		/* %edi = 2 * VCNT MSRs to clear */
	movl	%eax, %edi
	movl	$0x200, %ecx
	xorl	%eax, %eax
	xorl	%edx, %edx
clear_var_mtrrs:
	wrmsr
	add	$1, %ecx
	dec	%edi
	jnz	clear_var_mtrrs

	/* Determine CPU_ADDR_BITS and load PHYSMASK high word to %edx:
	   %edx = (1 << (phys_addr_bits - 32)) - 1. */
	movl	$0x80000008, %eax
	cpuid
	movb	%al, %cl
	sub	$32, %cl
	movl	$1, %edx
	shl	%cl, %edx
	subl	$1, %edx

	/* Preload high word of address mask (in %edx) for Variable
	 * MTRRs 0 and 1.
	 */
addrsize_set_high:
	xorl	%eax, %eax
	movl	$MTRR_PHYS_MASK(0), %ecx
	wrmsr
	movl	$MTRR_PHYS_MASK(1), %ecx
	wrmsr

	post_code(POST_SOC_SET_MTRR_BASE)
	/* Set Cache-as-RAM base address (write-back so stores stay in cache). */
	movl	$(MTRR_PHYS_BASE(0)), %ecx
	movl	car_mtrr_start, %eax
	orl	$MTRR_TYPE_WRBACK, %eax
	xorl	%edx, %edx
	wrmsr

	post_code(POST_SOC_SET_MTRR_MASK)
	/* Set Cache-as-RAM mask; rdmsr keeps the %edx high word set above. */
	movl	$(MTRR_PHYS_MASK(0)), %ecx
	rdmsr
	mov	car_mtrr_mask, %eax
	orl	$MTRR_PHYS_MASK_VALID, %eax
	wrmsr

	/* Enable cache for our code in Flash because we do XIP here
	   (write-protect type on the ROM region, variable MTRR 1). */
	movl	$MTRR_PHYS_BASE(1), %ecx
	xorl	%edx, %edx
	mov	rom_mtrr_base, %eax
	orl	$MTRR_TYPE_WRPROT, %eax
	wrmsr

	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
	mov	rom_mtrr_mask, %eax
	orl	$MTRR_PHYS_MASK_VALID, %eax
	wrmsr

	post_code(POST_SOC_ENABLE_MTRRS)

	/* Enable MTRR. */
	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
	orl	$MTRR_DEF_TYPE_EN, %eax
	wrmsr

#if CONFIG(CPU_HAS_L2_ENABLE_MSR)
	/*
	 * Enable the L2 cache. Currently this assumes that this
	 * only affect socketed CPU's for which this is always valid,
	 * hence the static preprocessor.
	 */
	movl	$BBL_CR_CTL3_MSR, %ecx
	rdmsr
	orl	$0x100, %eax
	wrmsr
#endif

	/* Enable cache (CR0.CD = 0, CR0.NW = 0). invd discards any stale
	   (pre-reset) cache contents before caching is switched on. */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	invd
	movl	%eax, %cr0

#if CONFIG(MICROCODE_UPDATE_PRE_RAM)
update_microcode:
	/* put the return address in %esp (still no stack available) */
	movl	$end_microcode_update, %esp
	jmp	update_bsp_microcode
end_microcode_update:
#endif
	/* Disable caching to change MTRR's. */
	movl	%cr0, %eax
	orl	$CR0_CacheDisable, %eax
	movl	%eax, %cr0

	/* Clear the mask valid to disable the ROM/XIP MTRR */
	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
	andl	$(~MTRR_PHYS_MASK_VALID), %eax
	wrmsr

	/* Enable cache. */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	invd
	movl	%eax, %cr0

	/* Enable the 'no eviction' mode (NEM SETUP: bit 0 set, bit 1 clear). */
	movl	$NoEvictMod_MSR, %ecx
	rdmsr
	orl	$1, %eax
	andl	$~2, %eax
	wrmsr

	/* Clear the cache memory region. This will also fill up the cache. */
	cld
	xorl	%eax, %eax
	movl	$_car_mtrr_start, %edi
	movl	$_car_mtrr_size, %ecx
	shr	$2, %ecx		/* byte count -> dword count for stosl */
	rep	stosl

	/* Enable the 'no eviction run' state (NEM RUN: bits 0 and 1 set). */
	movl	$NoEvictMod_MSR, %ecx
	rdmsr
	orl	$3, %eax
	wrmsr

	post_code(POST_SOC_DISABLE_CACHE)
	/* Enable Cache-as-RAM mode by disabling cache. */
	movl	%cr0, %eax
	orl	$CR0_CacheDisable, %eax
	movl	%eax, %cr0

	/* Re-validate the ROM/XIP MTRR so flash stays cacheable. */
	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
	orl	$MTRR_PHYS_MASK_VALID, %eax
	wrmsr

	post_code(POST_SOC_ENABLE_CACHE)
	/* Enable cache. */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	movl	%eax, %cr0

	/* Setup the stack (grows down from the top of the CAR region). */
	mov	$_ecar_stack, %esp

	/* Need to align stack to 16 bytes at call instruction. Account for
	   the pushes below. */
	andl	$0xfffffff0, %esp

#if ENV_X86_64

	#include <cpu/x86/64bit/entry64.inc>

	/* SysV AMD64: %rdi = tsc (hi:lo), %rsi = BIST. */
	movd	%mm2, %rdi
	shlq	$32, %rdi
	movd	%mm1, %rsi
	or	%rsi, %rdi
	movd	%mm0, %rsi

#else
	subl	$4, %esp		/* pad so %esp % 16 == 0 at the call */
	/* push TSC and BIST to stack */
	movd	%mm0, %eax
	pushl	%eax			/* BIST */
	movd	%mm2, %eax
	pushl	%eax			/* tsc[63:32] */
	movd	%mm1, %eax
	pushl	%eax			/* tsc[31:0] */
#endif

before_c_entry:
	post_code(POST_BOOTBLOCK_BEFORE_C_ENTRY)
	call	bootblock_c_entry_bist

	/* Should never see this postcode */
	post_code(POSTCODE_DEAD_CODE)

.Lhlt:
	hlt
	jmp	.Lhlt

/* MSR numbers of the fixed-range MTRRs cleared by clear_fixed_mtrr. */
fixed_mtrr_list:
	.word	MTRR_FIX_64K_00000
	.word	MTRR_FIX_16K_80000
	.word	MTRR_FIX_16K_A0000
	.word	MTRR_FIX_4K_C0000
	.word	MTRR_FIX_4K_C8000
	.word	MTRR_FIX_4K_D0000
	.word	MTRR_FIX_4K_D8000
	.word	MTRR_FIX_4K_E0000
	.word	MTRR_FIX_4K_E8000
	.word	MTRR_FIX_4K_F0000
	.word	MTRR_FIX_4K_F8000
fixed_mtrr_list_end:

_cache_as_ram_setup_end: