/* SPDX-License-Identifier: GPL-2.0-only */

#include <cpu/x86/mtrr.h>
#include <cpu/x86/cache.h>
#include <cpu/x86/post_code.h>

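/*
 * NoEvictMod_MSR (0x2e0) controls Intel no-eviction mode (NEM): as used
 * below, bit 0 arms the setup state and bit 1 starts the run state.
 * BBL_CR_CTL3_MSR (0x11e) carries the L2 cache enable bit set further down.
 */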
#define NoEvictMod_MSR 0x2e0
#define BBL_CR_CTL3_MSR 0x11e

.section .init
.global bootblock_pre_c_entry

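/*
 * This include is expected to define the linker-derived CAR/ROM layout
 * symbols used below: car_mtrr_start, car_mtrr_mask, rom_mtrr_base and
 * rom_mtrr_mask.
 */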
#include <cpu/intel/car/cache_as_ram_symbols.inc>

.code32
_cache_as_ram_setup:

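/*
 * Entered with no usable RAM or stack. By convention, earlier reset code
 * has stashed the BIST result in %mm0 and the initial TSC value in
 * %mm1/%mm2 (see before_c_entry below). Since there is no stack yet, the
 * "return address" for check_mtrr (defined elsewhere in the CAR/reset
 * path) is passed in %esp.
 */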
bootblock_pre_c_entry:
	movl $cache_as_ram, %esp /* return address */
	jmp check_mtrr /* Check if the CPU was properly reset */

cache_as_ram:
	post_code(0x20)

	/* Send INIT IPI to all APs (all excluding self). */
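	/*
	 * 0xFEE00300 is the local APIC ICR (low dword); 0x000C4500 selects
	 * INIT delivery mode with the all-excluding-self destination
	 * shorthand, level asserted.
	 */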
	movl $0x000C4500, %eax
	movl $0xFEE00300, %esi
	movl %eax, (%esi)

	/* All CPUs need to be in Wait for SIPI state */
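	/*
	 * ICR bit 12 is the Delivery Status flag; spin until the hardware
	 * reports the INIT IPI as delivered.
	 */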
wait_for_sipi:
	movl (%esi), %eax
	bt $12, %eax
	jc wait_for_sipi

	post_code(0x21)
	/* Clean up MTRR_DEF_TYPE_MSR. */
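	/*
	 * Writing zero clears the enable bits and sets the default memory
	 * type to uncacheable, so the MTRRs start from a known-off state.
	 */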
	movl $MTRR_DEF_TYPE_MSR, %ecx
	xorl %eax, %eax
	xorl %edx, %edx
	wrmsr

	post_code(0x22)
	/* Clear/disable fixed MTRRs */
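	/*
	 * Walk fixed_mtrr_list (an array of word-sized MSR numbers)
	 * backwards, writing 0 to each MSR. Neither movzwl nor wrmsr
	 * modifies the flags, so jnz tests the "add $-2, %ebx" result:
	 * the loop stops once %ebx reaches zero, after the first list
	 * entry has been written.
	 */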
	mov $fixed_mtrr_list_size, %ebx
	xor %eax, %eax
	xor %edx, %edx

clear_fixed_mtrr:
	add $-2, %ebx
	movzwl fixed_mtrr_list(%ebx), %ecx
	wrmsr
	jnz clear_fixed_mtrr

	/* Zero out all variable range MTRRs. */
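	/*
	 * MTRR_CAP_MSR[7:0] (VCNT) is the number of variable MTRRs; each
	 * one is a base/mask MSR pair starting at MSR 0x200, hence the
	 * count is doubled.
	 */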
	movl $MTRR_CAP_MSR, %ecx
	rdmsr
	andl $0xff, %eax
	shl $1, %eax
	movl %eax, %edi
	movl $0x200, %ecx
	xorl %eax, %eax
	xorl %edx, %edx
clear_var_mtrrs:
	wrmsr
	add $1, %ecx
	dec %edi
	jnz clear_var_mtrrs

	/* Determine CPU_ADDR_BITS and load PHYSMASK high word to %edx. */
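	/*
	 * CPUID leaf 0x80000008 returns the physical address width in %al.
	 * (1 << (width - 32)) - 1 is then the high dword of a PHYSMASK
	 * covering the whole physical address space.
	 */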
	movl $0x80000008, %eax
	cpuid
	movb %al, %cl
	sub $32, %cl
	movl $1, %edx
	shl %cl, %edx
	subl $1, %edx

	/*
	 * Preload high word of address mask (in %edx) for Variable
	 * MTRRs 0 and 1.
	 */
addrsize_set_high:
	xorl %eax, %eax
	movl $MTRR_PHYS_MASK(0), %ecx
	wrmsr
	movl $MTRR_PHYS_MASK(1), %ecx
	wrmsr

	post_code(0x23)
	/* Set Cache-as-RAM base address. */
	movl $(MTRR_PHYS_BASE(0)), %ecx
	movl car_mtrr_start, %eax
	orl $MTRR_TYPE_WRBACK, %eax
	xorl %edx, %edx
	wrmsr

	post_code(0x24)
	/* Set Cache-as-RAM mask. */
	movl $(MTRR_PHYS_MASK(0)), %ecx
	rdmsr
	mov car_mtrr_mask, %eax
	orl $MTRR_PHYS_MASK_VALID, %eax
	wrmsr

	/* Enable caching for our code in flash, as we execute in place (XIP). */
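	/*
	 * Write-protect is the right type for XIP: instruction fetches and
	 * reads are cached, while any stray writes go uncached to the bus
	 * instead of dirtying cache lines.
	 */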
	movl $MTRR_PHYS_BASE(1), %ecx
	xorl %edx, %edx
	mov rom_mtrr_base, %eax
	orl $MTRR_TYPE_WRPROT, %eax
	wrmsr

	movl $MTRR_PHYS_MASK(1), %ecx
	rdmsr
	mov rom_mtrr_mask, %eax
	orl $MTRR_PHYS_MASK_VALID, %eax
	wrmsr

	post_code(0x25)

	/* Enable MTRR. */
	movl $MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
	orl $MTRR_DEF_TYPE_EN, %eax
	wrmsr

#if CONFIG(CPU_HAS_L2_ENABLE_MSR)
	/*
	 * Enable the L2 cache. Currently this assumes that this only
	 * affects socketed CPUs, for which this is always valid, hence
	 * the static preprocessor check.
	 */
	movl $BBL_CR_CTL3_MSR, %ecx
	rdmsr
	orl $0x100, %eax
	wrmsr
#endif

	/* Enable cache (CR0.CD = 0, CR0.NW = 0). */
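	/*
	 * invd rather than wbinvd: nothing in the cache is backed by
	 * usable memory at this point, so stale lines are discarded
	 * without write-back.
	 */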
	movl %cr0, %eax
	andl $(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	invd
	movl %eax, %cr0

#if CONFIG(MICROCODE_UPDATE_PRE_RAM)
update_microcode:
	/* put the return address in %esp */
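	/*
	 * There is no stack yet, so this is a call by hand: the "return
	 * address" goes in %esp and update_bsp_microcode is expected to
	 * come back by jumping through it.
	 */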
	movl $end_microcode_update, %esp
	jmp update_bsp_microcode
end_microcode_update:
#endif
	/* Disable caching to change MTRRs. */
	movl %cr0, %eax
	orl $CR0_CacheDisable, %eax
	movl %eax, %cr0

	/* Clear the mask valid bit to disable the MTRR. */
	movl $MTRR_PHYS_MASK(1), %ecx
	rdmsr
	andl $(~MTRR_PHYS_MASK_VALID), %eax
	wrmsr

	/* Enable cache. */
	movl %cr0, %eax
	andl $(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	invd
	movl %eax, %cr0

	/* enable the 'no eviction' mode */
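	/*
	 * NoEvictMod_MSR bit 0 arms NEM setup; bit 1 is the 'run' state
	 * and is deliberately kept clear until the CAR region below has
	 * been initialized.
	 */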
	movl $NoEvictMod_MSR, %ecx
	rdmsr
	orl $1, %eax
	andl $~2, %eax
	wrmsr

	/* Clear the cache memory region. This will also fill up the cache. */
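	/*
	 * Storing one dword at a time over the whole CAR region allocates
	 * its cache lines; with NEM armed they stay resident instead of
	 * being evicted.
	 */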
	cld
	xorl %eax, %eax
	movl $_car_mtrr_start, %edi
	movl $_car_mtrr_size, %ecx
	shr $2, %ecx
	rep stosl

	/* enable the 'no eviction run' state */
	movl $NoEvictMod_MSR, %ecx
	rdmsr
	orl $3, %eax
	wrmsr

	post_code(0x26)
	/* Enable Cache-as-RAM mode by disabling cache. */
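	/*
	 * The flash MTRR is re-enabled while caching is off; with the NEM
	 * run state set, the CAR contents stay resident. Caching is
	 * switched back on right below.
	 */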
	movl %cr0, %eax
	orl $CR0_CacheDisable, %eax
	movl %eax, %cr0

	movl $MTRR_PHYS_MASK(1), %ecx
	rdmsr
	orl $MTRR_PHYS_MASK_VALID, %eax
	wrmsr

	post_code(0x28)
	/* Enable cache. */
	movl %cr0, %eax
	andl $(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	movl %eax, %cr0

	/* Set up the stack. */
	mov $_ecar_stack, %esp

	/*
	 * Need to align the stack to 16 bytes at the call instruction.
	 * Account for the pushes below.
	 */
	andl $0xfffffff0, %esp

#if ENV_X86_64

	#include <cpu/x86/64bit/entry64.inc>

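	/*
	 * Rebuild the 64-bit TSC value from %mm2:%mm1 in %rdi and put the
	 * BIST result in %rsi: the first two argument registers of
	 * bootblock_c_entry_bist() per the SysV AMD64 calling convention.
	 */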
	movd %mm2, %rdi
	shlq $32, %rdi
	movd %mm1, %rsi
	or %rsi, %rdi
	movd %mm0, %rsi

#else
	subl $4, %esp
	/* push TSC and BIST to stack */
	movd %mm0, %eax
	pushl %eax /* BIST */
	movd %mm2, %eax
	pushl %eax /* tsc[63:32] */
	movd %mm1, %eax
	pushl %eax /* tsc[31:0] */
#endif

before_c_entry:
	post_code(0x29)
	call bootblock_c_entry_bist

	/* Should never see this postcode */
	post_code(POST_DEAD_CODE)

.Lhlt:
	hlt
	jmp .Lhlt

fixed_mtrr_list:
	.word MTRR_FIX_64K_00000
	.word MTRR_FIX_16K_80000
	.word MTRR_FIX_16K_A0000
	.word MTRR_FIX_4K_C0000
	.word MTRR_FIX_4K_C8000
	.word MTRR_FIX_4K_D0000
	.word MTRR_FIX_4K_D8000
	.word MTRR_FIX_4K_E0000
	.word MTRR_FIX_4K_E8000
	.word MTRR_FIX_4K_F0000
	.word MTRR_FIX_4K_F8000
fixed_mtrr_list_size = . - fixed_mtrr_list

_cache_as_ram_setup_end: