/*
 * This file is part of the coreboot project.
 *
 * Copyright (C) 2008-2010 coresystems GmbH
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; version 2 of
 * the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */

// Make sure no stage 2 code is included:
#define __PRE_RAM__

// FIXME: Is this piece of code southbridge specific, or
// can it be cleaned up so this include is not required?
// It's needed right now because we get our DEFAULT_PMBASE from
// here.
#if IS_ENABLED(CONFIG_SOUTHBRIDGE_INTEL_I82801GX)
#include <southbridge/intel/i82801gx/i82801gx.h>
#elif IS_ENABLED(CONFIG_SOUTHBRIDGE_INTEL_I82801DX)
#include <southbridge/intel/i82801dx/i82801dx.h>
#elif IS_ENABLED(CONFIG_SOUTHBRIDGE_INTEL_I82801IX)
#include <southbridge/intel/i82801ix/i82801ix.h>
#elif IS_ENABLED(CONFIG_SOUTHBRIDGE_INTEL_I82801JX)
#include <southbridge/intel/i82801jx/i82801jx.h>

#else
#error "Southbridge needs SMM handler support."
#endif

// ADDR32() macro
#include <arch/registers.h>

#if IS_ENABLED(CONFIG_SMM_TSEG)
#error "Don't use this file with TSEG."

#endif /* CONFIG_SMM_TSEG */

#define LAPIC_ID 0xfee00020

.global smm_relocation_start
.global smm_relocation_end

/* Initially, SMM code executes in a real-mode-like environment. */
.code16

/**
 * When starting up, x86 CPUs have their SMBASE set to 0x30000. However,
 * this is not a good place for the SMM handler to live, so it needs to
 * be relocated.
 * Traditionally, SMM handlers lived in the A segment (0xa0000).
 * With growing SMM handlers, more CPU cores, etc., CPU vendors started
 * to allow relocating the handler to the end of physical memory, which
 * they refer to as TSEG.
 * This trampoline code relocates SMBASE to base address - (lapicid * 0x400).
 *
 * Why 0x400? It is a safe value to cover the save state area per CPU. On
 * current AMD CPUs this area is _documented_ to be 0x200 bytes. On Intel
 * Core 2 CPUs the _documented_ part of the save state area is 48 bytes
 * bigger, so we effectively size our data structures at 0x300 bytes.
 *
 * Example (with the SMM handler living at 0xa0000):
 *
 * LAPICID    SMBASE     SMM Entry   SAVE STATE
 *    0       0xa0000    0xa8000     0xafd00
 *    1       0x9fc00    0xa7c00     0xaf900
 *    2       0x9f800    0xa7800     0xaf500
 *    3       0x9f400    0xa7400     0xaf100
 *    4       0x9f000    0xa7000     0xaed00
 *    5       0x9ec00    0xa6c00     0xae900
 *    6       0x9e800    0xa6800     0xae500
 *    7       0x9e400    0xa6400     0xae100
 *    8       0x9e000    0xa6000     0xadd00
 *    9       0x9dc00    0xa5c00     0xad900
 *   10       0x9d800    0xa5800     0xad500
 *   11       0x9d400    0xa5400     0xad100
 *   12       0x9d000    0xa5000     0xacd00
 *   13       0x9cc00    0xa4c00     0xac900
 *   14       0x9c800    0xa4800     0xac500
 *   15       0x9c400    0xa4400     0xac100
 *    .          .          .           .
 *    .          .          .           .
 *    .          .          .           .
 *   31       0x98400    0xa0400     0xa8100
 *
 * With 32 cores, the SMM handler would need to fit between
 * 0xa0000-0xa0400 and the stub plus stack would need to go
 * at 0xa8000-0xa8100 (example for core 0). That is not enough.
 *
 * This means we're basically limited to 16 CPU cores before
 * we need to move the SMM handler to TSEG.
 *
 * Note: Some versions of the Pentium M need their SMBASE aligned to 32k.
 * On those, the above only works for up to 2 cores. But for now we only
 * care about Core (2) Duo/Solo.
 */
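/*
 * To see how the columns above are derived (values taken straight from the
 * table itself): for LAPIC ID 3, SMBASE = 0xa0000 - 3 * 0x400 = 0x9f400,
 * the SMM entry point is SMBASE + 0x8000 = 0xa7400, and the save state
 * starts at SMBASE + 0xfd00 = 0xaf100.
 */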

smm_relocation_start:
	/* Check the SMM revision ID to see if the save state uses the
	 * AMD64-style SMM_BASE field.
	 *   Intel Core Solo/Duo:  0x30007
	 *   Intel Core2 Solo/Duo: 0x30100
	 *   Intel SandyBridge:    0x30101
	 *   AMD64:                0x3XX64
	 * This check does not make much sense, unless someone ports
	 * SMI handling to AMD64 CPUs.
	 */
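	/* Offsets used below, relative to the default SMBASE of 0x30000
	 * (per the standard Intel/AMD SMM save state maps):
	 *   0x38000 + 0x7efc: SMM revision ID (low byte checked below)
	 *   0x38000 + 0x7ef8: SMBASE slot in the legacy (Intel) save state
	 *   0x38000 + 0x7f00: SMBASE slot in the AMD64-style save state
	 */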

	mov $0x38000 + 0x7efc, %ebx
	ADDR32(mov) (%ebx), %al
	cmp $0x64, %al
	je 1f

	mov $0x38000 + 0x7ef8, %ebx
	jmp smm_relocate
1:
	mov $0x38000 + 0x7f00, %ebx

smm_relocate:
	/* Get this CPU's LAPIC ID */
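	/* The ID is read from the memory-mapped LAPIC ID register at
	 * 0xfee00020; it lives in bits 31:24, hence the shift below.
	 */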
	movl $LAPIC_ID, %esi
	ADDR32(movl) (%esi), %ecx
	shr $24, %ecx

	/* Calculate the offset by multiplying the
	 * APIC ID by 1024 (0x400)
	 */
	movl %ecx, %edx
	shl $10, %edx

	movl $0xa0000, %eax
	subl %edx, %eax /* subtract offset, see above */

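	/* %ebx still points at the SMBASE slot selected above; storing the
	 * new base there makes the CPU load it as its SMBASE when the RSM
	 * below executes, so the relocation takes effect on the next SMI.
	 */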
	ADDR32(movl) %eax, (%ebx)

	/* The next section of code is potentially southbridge-specific. */

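	/* Both status registers written below are "write 1 to clear":
	 * reading the current value and writing it straight back clears
	 * every bit that is currently set.
	 */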
	/* Clear SMI status */
	movw $(DEFAULT_PMBASE + 0x34), %dx
	inw %dx, %ax
	outw %ax, %dx

	/* Clear PM1 status */
	movw $(DEFAULT_PMBASE + 0x00), %dx
	inw %dx, %ax
	outw %ax, %dx

	/* Set EOS bit so other SMIs can occur */
	movw $(DEFAULT_PMBASE + 0x30), %dx
	inl %dx, %eax
	orl $(1 << 1), %eax
	outl %eax, %dx

	/* End of southbridge-specific section. */

#if IS_ENABLED(CONFIG_DEBUG_SMM_RELOCATION)
	/* Print "[SMM-x]" so we can determine whether CPU x entered SMM. */
	movw $CONFIG_TTYS0_BASE, %dx
	mov $'[', %al
	outb %al, %dx
	mov $'S', %al
	outb %al, %dx
	mov $'M', %al
	outb %al, %dx
	outb %al, %dx
	movb $'-', %al
	outb %al, %dx
	/* Calculate the ASCII digit of the CPU number. More than 9 cores? -> FIXME */
	movb %cl, %al
	addb $'0', %al
	outb %al, %dx
	mov $']', %al
	outb %al, %dx
	mov $'\r', %al
	outb %al, %dx
	mov $'\n', %al
	outb %al, %dx
#endif

	/* That's it. Return from SMM. */
	rsm
smm_relocation_end: