/* SPDX-License-Identifier: GPL-2.0-only */

#include <arch/romstage.h>
#include <cbmem.h>
#include <console/console.h>
#include <commonlib/helpers.h>
#include <cpu/amd/mtrr.h>
#include <cpu/cpu.h>
#include <cpu/x86/msr.h>
#include <cpu/x86/mtrr.h>
#include <northbridge/amd/agesa/agesa_helper.h>
#include <romstage_handoff.h>

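/*
 * Claim the first unused variable MTRR and program it to map 'size' bytes
 * at 'base' as uncacheable. The mask encoding below assumes 'size' is a
 * power of two and 'base' is aligned to 'size', as the callers here
 * guarantee.
 */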
static void set_range_uc(u32 base, u32 size)
{
	int i, max_var_mtrrs;
	msr_t msr;

	max_var_mtrrs = get_var_mtrr_count();

	for (i = 0; i < max_var_mtrrs; i++) {
		msr = rdmsr(MTRR_PHYS_MASK(i));
		if (!(msr.lo & MTRR_PHYS_MASK_VALID))
			break;
	}
	if (i == max_var_mtrrs)
		die("Ran out of unused MTRRs\n");

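	/* MTRR_PHYS_BASE holds the base address with the memory type in its
	 * low bits; MTRR_PHYS_MASK holds the size mask plus the valid bit,
	 * extended to the CPU's full physical address width.
	 */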
	msr.hi = 0;
	msr.lo = base | MTRR_TYPE_UNCACHEABLE;
	wrmsr(MTRR_PHYS_BASE(i), msr);

	msr.hi = (1 << (cpu_phys_address_size() - 32)) - 1;
	msr.lo = ~(size - 1) | MTRR_PHYS_MASK_VALID;
	wrmsr(MTRR_PHYS_MASK(i), msr);
}

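/*
 * On the normal boot path, mark the top of CBMEM uncacheable before CAR
 * teardown. This is a no-op on the S3 resume path.
 */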
void fixup_cbmem_to_UC(int s3resume)
{
	if (s3resume)
		return;

	/* For the normal path, INIT_POST has returned with all
	 * memory set WB cacheable. But we need CBMEM as UC so
	 * that CAR teardown can invalidate the cache without
	 * writing anything back.
	 */

	uintptr_t top_of_ram = (uintptr_t)cbmem_top();
	top_of_ram = ALIGN_UP(top_of_ram, 4 * MiB);

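	/* Cover the 8 MiB below the (aligned) top of RAM with two 4 MiB UC
	 * ranges; a single 8 MiB MTRR range would require an 8 MiB-aligned
	 * base, which the 4 MiB alignment above does not guarantee.
	 */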
	set_range_uc(top_of_ram - 4 * MiB, 4 * MiB);
	set_range_uc(top_of_ram - 8 * MiB, 4 * MiB);
}

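/*
 * Rebuild the postcar frame's MTRR solution from the variable MTRRs that
 * AGESA left programmed, instead of computing one from scratch.
 */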
static void recover_postcar_frame(struct postcar_frame *pcf)
{
	msr_t base, mask;
	int i;
	int s3resume = romstage_handoff_is_resume();

	/* Replicate non-UC MTRRs as left behind by AGESA. */
	for (i = 0; i < pcf->mtrr->max_var_mtrrs; i++) {
		mask = rdmsr(MTRR_PHYS_MASK(i));
		base = rdmsr(MTRR_PHYS_BASE(i));
		/* Decode range size and memory type; the low 12 bits of
		 * both MSRs hold flags rather than address bits.
		 */
		u32 size = ~(mask.lo & ~0xfff) + 1;
		u8 type = base.lo & 0x7;
		base.lo &= ~0xfff;

		if (!(mask.lo & MTRR_PHYS_MASK_VALID) ||
		    (type == MTRR_TYPE_UNCACHEABLE))
			continue;

		postcar_frame_add_mtrr(pcf, base.lo, size, type);
	}

	/* For the S3 resume path, INIT_RESUME does not return with
	 * the memory covering CBMEM set as WB cacheable. For better
	 * speed, make it WB after CAR teardown.
	 */
	if (s3resume) {
		uintptr_t top_of_ram = (uintptr_t)cbmem_top();
		top_of_ram = ALIGN_DOWN(top_of_ram, 4 * MiB);

		postcar_frame_add_mtrr(pcf, top_of_ram - 4 * MiB, 4 * MiB,
				       MTRR_TYPE_WRBACK);
		postcar_frame_add_mtrr(pcf, top_of_ram - 8 * MiB, 4 * MiB,
				       MTRR_TYPE_WRBACK);
	}
}

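/*
 * Callback invoked by the common postcar loader: skip its default MTRR
 * setup and reuse the layout AGESA already programmed.
 */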
void fill_postcar_frame(struct postcar_frame *pcf)
{
	pcf->skip_common_mtrr = 1;
	recover_postcar_frame(pcf);
}