/* SPDX-License-Identifier: GPL-2.0-only */

#include <stdint.h>
#include <cbmem.h>
#include <cpu/x86/msr.h>
#include <cpu/x86/mtrr.h>
#include <cpu/amd/mtrr.h>
#include <cpu/x86/cache.h>
#include <northbridge/amd/agesa/agesa_helper.h>
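
/*
 * MSRs copied to CBMEM by backup_mtrr() and written back by restore_mtrr():
 * the fixed-range and variable-range MTRRs plus SYSCFG, TOP_MEM and TOP_MEM2.
 * Together these cover the CPU caching and memory-map setup, so the saved
 * copy can be reapplied later (typically across S3 suspend/resume) without
 * recomputing it.
 */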
/* TODO: Do we want MTRR_DEF_TYPE_MSR too? */
static const uint32_t msr_backup[] = {
	MTRR_FIX_64K_00000,
	MTRR_FIX_16K_80000,
	MTRR_FIX_16K_A0000,
	MTRR_FIX_4K_C0000,
	MTRR_FIX_4K_C8000,
	MTRR_FIX_4K_D0000,
	MTRR_FIX_4K_D8000,
	MTRR_FIX_4K_E0000,
	MTRR_FIX_4K_E8000,
	MTRR_FIX_4K_F0000,
	MTRR_FIX_4K_F8000,
	MTRR_PHYS_BASE(0),
	MTRR_PHYS_MASK(0),
	MTRR_PHYS_BASE(1),
	MTRR_PHYS_MASK(1),
	MTRR_PHYS_BASE(2),
	MTRR_PHYS_MASK(2),
	MTRR_PHYS_BASE(3),
	MTRR_PHYS_MASK(3),
	MTRR_PHYS_BASE(4),
	MTRR_PHYS_MASK(4),
	MTRR_PHYS_BASE(5),
	MTRR_PHYS_MASK(5),
	MTRR_PHYS_BASE(6),
	MTRR_PHYS_MASK(6),
	MTRR_PHYS_BASE(7),
	MTRR_PHYS_MASK(7),
	SYSCFG_MSR,
	TOP_MEM,
	TOP_MEM2,
};
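
/*
 * Save the MSRs listed in msr_backup[] into a freshly allocated CBMEM area.
 * The fixed-range MTRRs are read with MtrrFixDramModEn set so the AMD RdDram
 * and WrDram extension bits are captured as well. If the CBMEM area cannot
 * be allocated, the backup is silently skipped.
 */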
void backup_mtrr(void)
{
	msr_t syscfg_msr;
	msr_t *mtrr_save = (msr_t *)cbmem_add(CBMEM_ID_AGESA_MTRR,
					      sizeof(msr_t) * ARRAY_SIZE(msr_backup));
	if (!mtrr_save)
		return;

	/* Enable access to AMD RdDram and WrDram extension bits */
	syscfg_msr = rdmsr(SYSCFG_MSR);
	syscfg_msr.lo |= SYSCFG_MSR_MtrrFixDramModEn;
	wrmsr(SYSCFG_MSR, syscfg_msr);

	for (int i = 0; i < ARRAY_SIZE(msr_backup); i++)
		*mtrr_save++ = rdmsr(msr_backup[i]);

	/* Disable access to AMD RdDram and WrDram extension bits */
	syscfg_msr = rdmsr(SYSCFG_MSR);
	syscfg_msr.lo &= ~SYSCFG_MSR_MtrrFixDramModEn;
	wrmsr(SYSCFG_MSR, syscfg_msr);
}
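
/*
 * Write back the MSR values saved by backup_mtrr(), again with
 * MtrrFixDramModEn set so the AMD RdDram and WrDram bits are restored too.
 * Does nothing if no backup is found in CBMEM (e.g. backup_mtrr() was never
 * called or its allocation failed).
 */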
void restore_mtrr(void)
{
	msr_t syscfg_msr;
	msr_t *mtrr_save = (msr_t *)cbmem_find(CBMEM_ID_AGESA_MTRR);

	if (!mtrr_save)
		return;

	/* Enable access to AMD RdDram and WrDram extension bits */
	syscfg_msr = rdmsr(SYSCFG_MSR);
	syscfg_msr.lo |= SYSCFG_MSR_MtrrFixDramModEn;
	wrmsr(SYSCFG_MSR, syscfg_msr);

	for (int i = 0; i < ARRAY_SIZE(msr_backup); i++)
		wrmsr(msr_backup[i], *mtrr_save++);

	/* Disable access to AMD RdDram and WrDram extension bits */
	syscfg_msr = rdmsr(SYSCFG_MSR);
	syscfg_msr.lo &= ~SYSCFG_MSR_MtrrFixDramModEn;
	wrmsr(SYSCFG_MSR, syscfg_msr);
}