/* SPDX-License-Identifier: GPL-2.0-only */

/*
 * Cache-as-RAM (CAR) setup using the CPU's No-Eviction Mode (NEM).
 *
 * 32-bit flat-mode GAS/AT&T assembly, executed in place from flash (XIP)
 * before any RAM exists.  Overall flow:
 *   1. Park all other CPUs (INIT IPI) and check the reset state.
 *   2. Wipe every fixed and variable MTRR.
 *   3. Map the CAR region write-back and the boot ROM write-protect.
 *   4. Enter no-eviction mode, zero-fill the CAR region so it becomes
 *      cache-resident, then switch to the NEM "run" state.
 *   5. Set up a stack inside CAR and call bootblock_c_entry_bist().
 *
 * On entry %mm0 holds the BIST result and %mm2:%mm1 the boot TSC
 * (presumably stashed by the shared reset-vector code; not visible in
 * this file -- confirm against the entry path).
 */

#include <cpu/x86/mtrr.h>
#include <cpu/x86/cache.h>
#include <cpu/x86/post_code.h>

/* The MRC variable area sits directly on top of the regular CAR area. */
#define CACHE_AS_RAM_SIZE (CONFIG_DCACHE_RAM_SIZE \
		+ CONFIG_DCACHE_RAM_MRC_VAR_SIZE)
#define CACHE_AS_RAM_BASE CONFIG_DCACHE_RAM_BASE

#define NoEvictMod_MSR 0x2e0	/* NEM control: bit 0 = setup, bit 1 = run */
#define BBL_CR_CTL3_MSR 0x11e	/* L2 cache control (socketed CPUs) */

.global bootblock_pre_c_entry

.code32
_cache_as_ram_setup:

bootblock_pre_c_entry:
	/* No stack exists yet: check_mtrr (defined elsewhere) "returns"
	   by jumping to the address parked in %esp. */
	movl	$cache_as_ram, %esp /* return address */
	jmp	check_mtrr /* Check if CPU properly reset */

cache_as_ram:
	post_code(0x20)

	/* Send INIT IPI to all excluding ourself.
	   0xFEE00300 is the local APIC ICR (low dword). */
	movl	$0x000C4500, %eax
	movl	$0xFEE00300, %esi
	movl	%eax, (%esi)

	/* All CPUs need to be in Wait for SIPI state.
	   Spin until the ICR delivery-status bit (bit 12) clears. */
wait_for_sipi:
	movl	(%esi), %eax
	bt	$12, %eax
	jc	wait_for_sipi

	post_code(0x21)
	/* Clean-up MTRR_DEF_TYPE_MSR: default type UC, MTRRs disabled. */
	movl	$MTRR_DEF_TYPE_MSR, %ecx
	xorl	%eax, %eax
	xorl	%edx, %edx
	wrmsr

	post_code(0x22)
	/* Clear/disable fixed MTRRs.  Walks fixed_mtrr_list (bottom of
	   file) backwards; %eax:%edx stay zero for every wrmsr. */
	mov	$fixed_mtrr_list_size, %ebx
	xor	%eax, %eax
	xor	%edx, %edx

clear_fixed_mtrr:
	add	$-2, %ebx		/* each list entry is a 16-bit MSR number */
	movzwl	fixed_mtrr_list(%ebx), %ecx
	wrmsr
	jnz	clear_fixed_mtrr	/* ZF still from "add": stop at %ebx == 0 */

	/* Zero out all variable range MTRRs. */
	movl	$MTRR_CAP_MSR, %ecx
	rdmsr
	andl	$0xff, %eax		/* VCNT = number of variable MTRR pairs */
	shl	$1, %eax		/* two MSRs (base + mask) per pair */
	movl	%eax, %edi
	movl	$0x200, %ecx		/* first variable MTRR MSR */
	xorl	%eax, %eax
	xorl	%edx, %edx
clear_var_mtrrs:
	wrmsr
	add	$1, %ecx
	dec	%edi
	jnz	clear_var_mtrrs

	/* Determine CPU_ADDR_BITS and load PHYSMASK high word to %edx.
	   CPUID leaf 0x80000008 returns the physical address width in %al. */
	movl	$0x80000008, %eax
	cpuid
	movb	%al, %cl
	sub	$32, %cl
	movl	$1, %edx
	shl	%cl, %edx
	subl	$1, %edx		/* %edx = (1 << (addr_bits - 32)) - 1 */

	/* Preload high word of address mask (in %edx) for Variable
	 * MTRRs 0 and 1.  The mask writes below use rdmsr first so this
	 * high word is preserved.
	 */
addrsize_set_high:
	xorl	%eax, %eax
	movl	$MTRR_PHYS_MASK(0), %ecx
	wrmsr
	movl	$MTRR_PHYS_MASK(1), %ecx
	wrmsr


	post_code(0x23)
	/* Set Cache-as-RAM base address. */
	movl	$(MTRR_PHYS_BASE(0)), %ecx
	movl	$(CACHE_AS_RAM_BASE | MTRR_TYPE_WRBACK), %eax
	xorl	%edx, %edx
	wrmsr

	post_code(0x24)
	/* Set Cache-as-RAM mask (rdmsr keeps the preloaded high word). */
	movl	$(MTRR_PHYS_MASK(0)), %ecx
	rdmsr
	movl	$(~(CACHE_AS_RAM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
	wrmsr

	/* Enable cache for our code in Flash because we do XIP here */
	movl	$MTRR_PHYS_BASE(1), %ecx
	xorl	%edx, %edx
	movl	$(CACHE_ROM_BASE | MTRR_TYPE_WRPROT), %eax
	wrmsr

	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
	movl	$(~(CACHE_ROM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
	wrmsr

	post_code(0x25)

	/* Enable MTRR. */
	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
	orl	$MTRR_DEF_TYPE_EN, %eax
	wrmsr

#if CONFIG(CPU_HAS_L2_ENABLE_MSR)
	/*
	 * Enable the L2 cache. Currently this assumes that this
	 * only affect socketed CPU's for which this is always valid,
	 * hence the static preprocesser.
	 */
	movl	$BBL_CR_CTL3_MSR, %ecx
	rdmsr
	orl	$0x100, %eax
	wrmsr
#endif

	/* Enable cache (CR0.CD = 0, CR0.NW = 0). */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	invd
	movl	%eax, %cr0

#if CONFIG(MICROCODE_UPDATE_PRE_RAM)
update_microcode:
	/* put the return address in %esp -- the same no-stack "call"
	   trick used for check_mtrr above */
	movl	$end_microcode_update, %esp
	jmp	update_bsp_microcode
end_microcode_update:
#endif
	/* Disable caching to change MTRR's. */
	movl	%cr0, %eax
	orl	$CR0_CacheDisable, %eax
	movl	%eax, %cr0

	/* Clear the mask valid to disable the (ROM) MTRR; it is
	   re-validated below once the NEM run state is entered. */
	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
	andl	$(~MTRR_PHYS_MASK_VALID), %eax
	wrmsr

	/* Enable cache. */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	invd
	movl	%eax, %cr0

	/* enable the 'no eviction' mode: setup bit (0) set, run bit (1)
	   cleared */
	movl	$NoEvictMod_MSR, %ecx
	rdmsr
	orl	$1, %eax
	andl	$~2, %eax
	wrmsr

	/* Clear the cache memory region. This will also fill up the cache. */
	movl	$CACHE_AS_RAM_BASE, %esi
	movl	%esi, %edi
	movl	$(CACHE_AS_RAM_SIZE >> 2), %ecx	/* dword count */
	xorl	%eax, %eax
	rep stosl

	/* enable the 'no eviction run' state: setup and run bits both set */
	movl	$NoEvictMod_MSR, %ecx
	rdmsr
	orl	$3, %eax
	wrmsr

	post_code(0x26)
	/* Enable Cache-as-RAM mode by disabling cache. */
	movl	%cr0, %eax
	orl	$CR0_CacheDisable, %eax
	movl	%eax, %cr0

	/* Re-validate the XIP ROM MTRR that was disabled above. */
	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
	orl	$MTRR_PHYS_MASK_VALID, %eax
	wrmsr

	post_code(0x28)
	/* Enable cache. */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	movl	%eax, %cr0

	/* Setup the stack.  _ecar_stack is presumably provided by the
	   linker script (top of the CAR stack) -- not visible here. */
	mov	$_ecar_stack, %esp

	/* Need to align stack to 16 bytes at call instruction. Account for
	   the pushes below. */
	andl	$0xfffffff0, %esp
	subl	$4, %esp		/* 4 + 3*4 pushed below = 16 */

	/* push TSC and BIST to stack */
	movd	%mm0, %eax
	pushl	%eax		/* BIST */
	movd	%mm2, %eax
	pushl	%eax		/* tsc[63:32] */
	movd	%mm1, %eax
	pushl	%eax		/* tsc[31:0] */

before_c_entry:
	post_code(0x29)
	call	bootblock_c_entry_bist

	/* Should never see this postcode */
	post_code(POST_DEAD_CODE)


.Lhlt:
	hlt
	jmp	.Lhlt

/* 16-bit MSR numbers of the fixed-range MTRRs, consumed back-to-front
   by the clear_fixed_mtrr loop above. */
fixed_mtrr_list:
	.word	MTRR_FIX_64K_00000
	.word	MTRR_FIX_16K_80000
	.word	MTRR_FIX_16K_A0000
	.word	MTRR_FIX_4K_C0000
	.word	MTRR_FIX_4K_C8000
	.word	MTRR_FIX_4K_D0000
	.word	MTRR_FIX_4K_D8000
	.word	MTRR_FIX_4K_E0000
	.word	MTRR_FIX_4K_E8000
	.word	MTRR_FIX_4K_F0000
	.word	MTRR_FIX_4K_F8000
fixed_mtrr_list_size = . - fixed_mtrr_list

_cache_as_ram_setup_end: