/*
 * This file is part of the coreboot project.
 *
 * Copyright (C) 2000,2007 Ronald G. Minnich <rminnich@gmail.com>
 * Copyright (C) 2007-2008 coresystems GmbH
 * Copyright (C) 2012 Kyösti Mälkki <kyosti.malkki@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */
17
#include <cpu/x86/mtrr.h>
#include <cpu/x86/cache.h>
#include <cpu/x86/post_code.h>

/*
 * Cache-as-RAM region: DCACHE_RAM_SIZE plus the MRC variable area on top
 * (presumably scratch space for the memory reference code — the two are
 * simply summed here).
 */
#define CACHE_AS_RAM_SIZE (CONFIG_DCACHE_RAM_SIZE \
		+ CONFIG_DCACHE_RAM_MRC_VAR_SIZE)
#define CACHE_AS_RAM_BASE CONFIG_DCACHE_RAM_BASE

/* MSR 0x2e0: no-eviction-mode (NEM) control, used below to pin the
   cache contents so the data cache can serve as RAM. */
#define NoEvictMod_MSR 0x2e0
27
Kyösti Mälkkic641f7e2018-12-28 16:54:54 +020028.global bootblock_pre_c_entry
29
Arthur Heymansdd4d8952018-06-03 12:04:26 +020030.code32
31_cache_as_ram_setup:
32
Kyösti Mälkkic641f7e2018-12-28 16:54:54 +020033bootblock_pre_c_entry:
Arthur Heymansdd4d8952018-06-03 12:04:26 +020034
35cache_as_ram:
36 post_code(0x20)
37
38 /* Send INIT IPI to all excluding ourself. */
39 movl $0x000C4500, %eax
40 movl $0xFEE00300, %esi
41 movl %eax, (%esi)
42
43 /* All CPUs need to be in Wait for SIPI state */
44wait_for_sipi:
45 movl (%esi), %eax
46 bt $12, %eax
47 jc wait_for_sipi
48
49 post_code(0x21)
50 /* Clean-up MTRR_DEF_TYPE_MSR. */
51 movl $MTRR_DEF_TYPE_MSR, %ecx
52 xorl %eax, %eax
53 xorl %edx, %edx
54 wrmsr
55
56 post_code(0x22)
Arthur Heymansc2ccc972018-06-03 12:09:52 +020057 /* Clear/disable fixed MTRRs */
58 mov $fixed_mtrr_list_size, %ebx
59 xor %eax, %eax
60 xor %edx, %edx
61
62clear_fixed_mtrr:
63 add $-2, %ebx
64 movzwl fixed_mtrr_list(%ebx), %ecx
Arthur Heymansdd4d8952018-06-03 12:04:26 +020065 wrmsr
Arthur Heymansc2ccc972018-06-03 12:09:52 +020066 jnz clear_fixed_mtrr
Arthur Heymansdd4d8952018-06-03 12:04:26 +020067
68 /* Zero out all variable range MTRRs. */
69 movl $MTRR_CAP_MSR, %ecx
70 rdmsr
71 andl $0xff, %eax
72 shl $1, %eax
73 movl %eax, %edi
74 movl $0x200, %ecx
75 xorl %eax, %eax
76 xorl %edx, %edx
77clear_var_mtrrs:
78 wrmsr
79 add $1, %ecx
80 dec %edi
81 jnz clear_var_mtrrs
82
Arthur Heymansc2ccc972018-06-03 12:09:52 +020083 /* Determine CPU_ADDR_BITS and load PHYSMASK high word to %edx. */
84 movl $0x80000008, %eax
85 cpuid
86 movb %al, %cl
87 sub $32, %cl
88 movl $1, %edx
89 shl %cl, %edx
90 subl $1, %edx
91
92 /* Preload high word of address mask (in %edx) for Variable
93 * MTRRs 0 and 1.
94 */
95addrsize_set_high:
96 xorl %eax, %eax
97 movl $MTRR_PHYS_MASK(0), %ecx
98 wrmsr
99 movl $MTRR_PHYS_MASK(1), %ecx
100 wrmsr
101
102
Arthur Heymansdd4d8952018-06-03 12:04:26 +0200103 post_code(0x23)
104 /* Set Cache-as-RAM base address. */
105 movl $(MTRR_PHYS_BASE(0)), %ecx
106 movl $(CACHE_AS_RAM_BASE | MTRR_TYPE_WRBACK), %eax
107 xorl %edx, %edx
108 wrmsr
109
110 post_code(0x24)
111 /* Set Cache-as-RAM mask. */
112 movl $(MTRR_PHYS_MASK(0)), %ecx
Arthur Heymansc2ccc972018-06-03 12:09:52 +0200113 rdmsr
Arthur Heymansdd4d8952018-06-03 12:04:26 +0200114 movl $(~(CACHE_AS_RAM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
Arthur Heymansdd4d8952018-06-03 12:04:26 +0200115 wrmsr
116
Arthur Heymans48bf7122019-01-05 17:18:11 +0100117 /* Enable cache for our code in Flash because we do XIP here */
118 movl $MTRR_PHYS_BASE(1), %ecx
119 xorl %edx, %edx
120 movl $CACHE_ROM_BASE | MTRR_TYPE_WRPROT, %eax
121 wrmsr
122
123 movl $MTRR_PHYS_MASK(1), %ecx
124 rdmsr
125 movl $(~(CACHE_ROM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
126 wrmsr
127
Arthur Heymansdd4d8952018-06-03 12:04:26 +0200128 post_code(0x25)
129
130 /* Enable MTRR. */
131 movl $MTRR_DEF_TYPE_MSR, %ecx
132 rdmsr
133 orl $MTRR_DEF_TYPE_EN, %eax
134 wrmsr
135
136 /* Enable cache (CR0.CD = 0, CR0.NW = 0). */
137 movl %cr0, %eax
138 andl $(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
139 invd
140 movl %eax, %cr0
141
Arthur Heymans48bf7122019-01-05 17:18:11 +0100142#if IS_ENABLED(CONFIG_MICROCODE_UPDATE_PRE_RAM)
143update_microcode:
144 /* put the return address in %esp */
145 movl $end_microcode_update, %esp
146 jmp update_bsp_microcode
147end_microcode_update:
148#endif
149 /* Disable caching to change MTRR's. */
150 movl %cr0, %eax
151 orl $CR0_CacheDisable, %eax
152 movl %eax, %cr0
153
154 /* Clear the mask valid to disable the MTRR */
155 movl $MTRR_PHYS_MASK(1), %ecx
156 rdmsr
157 andl $(~MTRR_PHYS_MASK_VALID), %eax
158 wrmsr
159
160 /* Enable cache. */
161 movl %cr0, %eax
162 andl $(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
163 invd
164 movl %eax, %cr0
165
Arthur Heymansdd4d8952018-06-03 12:04:26 +0200166 /* enable the 'no eviction' mode */
Arthur Heymansa28befd2018-12-20 13:59:34 +0100167 movl $NoEvictMod_MSR, %ecx
Arthur Heymansdd4d8952018-06-03 12:04:26 +0200168 rdmsr
Arthur Heymansa28befd2018-12-20 13:59:34 +0100169 orl $1, %eax
170 andl $~2, %eax
Arthur Heymansdd4d8952018-06-03 12:04:26 +0200171 wrmsr
172
173 /* Clear the cache memory region. This will also fill up the cache. */
174 movl $CACHE_AS_RAM_BASE, %esi
175 movl %esi, %edi
176 movl $(CACHE_AS_RAM_SIZE >> 2), %ecx
Arthur Heymansdd4d8952018-06-03 12:04:26 +0200177 xorl %eax, %eax
178 rep stosl
179
180 /* enable the 'no eviction run' state */
Arthur Heymansa28befd2018-12-20 13:59:34 +0100181 movl $NoEvictMod_MSR, %ecx
Arthur Heymansdd4d8952018-06-03 12:04:26 +0200182 rdmsr
Arthur Heymansa28befd2018-12-20 13:59:34 +0100183 orl $3, %eax
Arthur Heymansdd4d8952018-06-03 12:04:26 +0200184 wrmsr
185
186 post_code(0x26)
187 /* Enable Cache-as-RAM mode by disabling cache. */
188 movl %cr0, %eax
189 orl $CR0_CacheDisable, %eax
190 movl %eax, %cr0
191
Arthur Heymansdd4d8952018-06-03 12:04:26 +0200192 movl $MTRR_PHYS_MASK(1), %ecx
Arthur Heymansc2ccc972018-06-03 12:09:52 +0200193 rdmsr
Arthur Heymans48bf7122019-01-05 17:18:11 +0100194 orl $MTRR_PHYS_MASK_VALID, %eax
Arthur Heymansdd4d8952018-06-03 12:04:26 +0200195 wrmsr
196
197 post_code(0x28)
198 /* Enable cache. */
199 movl %cr0, %eax
200 andl $(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
201 movl %eax, %cr0
202
203 /* Setup the stack. */
Kyösti Mälkkic641f7e2018-12-28 16:54:54 +0200204 mov $_car_stack_end, %esp
Arthur Heymansdd4d8952018-06-03 12:04:26 +0200205
Kyösti Mälkkic641f7e2018-12-28 16:54:54 +0200206 /* Need to align stack to 16 bytes at call instruction. Account for
207 the pushes below. */
Arthur Heymans348b79f2018-06-03 17:14:19 +0200208 andl $0xfffffff0, %esp
Kyösti Mälkkic641f7e2018-12-28 16:54:54 +0200209 subl $4, %esp
Arthur Heymans348b79f2018-06-03 17:14:19 +0200210
Kyösti Mälkkic641f7e2018-12-28 16:54:54 +0200211 /* push TSC and BIST to stack */
212 movd %mm0, %eax
213 pushl %eax /* BIST */
214 movd %mm2, %eax
215 pushl %eax /* tsc[63:32] */
216 movd %mm1, %eax
217 pushl %eax /* tsc[31:0] */
Arthur Heymansdd4d8952018-06-03 12:04:26 +0200218
Kyösti Mälkkic641f7e2018-12-28 16:54:54 +0200219before_c_entry:
Arthur Heymansdd4d8952018-06-03 12:04:26 +0200220 post_code(0x29)
Kyösti Mälkkic641f7e2018-12-28 16:54:54 +0200221 call bootblock_c_entry_bist
Arthur Heymansdd4d8952018-06-03 12:04:26 +0200222
223 /* Should never see this postcode */
224 post_code(POST_DEAD_CODE)
225
226
227.Lhlt:
228 hlt
229 jmp .Lhlt
230
Arthur Heymansc2ccc972018-06-03 12:09:52 +0200231fixed_mtrr_list:
232 .word MTRR_FIX_64K_00000
233 .word MTRR_FIX_16K_80000
234 .word MTRR_FIX_16K_A0000
235 .word MTRR_FIX_4K_C0000
236 .word MTRR_FIX_4K_C8000
237 .word MTRR_FIX_4K_D0000
238 .word MTRR_FIX_4K_D8000
239 .word MTRR_FIX_4K_E0000
240 .word MTRR_FIX_4K_E8000
241 .word MTRR_FIX_4K_F0000
242 .word MTRR_FIX_4K_F8000
243fixed_mtrr_list_size = . - fixed_mtrr_list
Arthur Heymansdd4d8952018-06-03 12:04:26 +0200244
245_cache_as_ram_setup_end: