/*
 * Based on arch/arm/include/asm/cacheflush.h
 *
 * Copyright (C) 1999-2002 Russell King.
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc.
 */

#include <arch/asm.h>
#include <arch/cache_helpers.h>

/*
 * Bring an ARMv8 processor we just gained control of (e.g. from IROM) into a
 * known state regarding caches/SCTLR. Completely cleans and invalidates
 * icache/dcache, disables MMU and dcache (if active), and enables unaligned
 * accesses, icache and branch prediction (if inactive). Clobbers x4 and x8,
 * plus whatever registers flush_dcache_all uses internally.
 */
ENTRY(arm_init_caches)
	/* w4: SCTLR, return address: x8 (stay valid for the whole function) */
	mov	x8, x30
	/* XXX: Assume that we always start running at EL3 */
	mrs	x4, sctlr_el3

	/* FIXME: How to enable branch prediction on ARMv8? */
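	/*
	 * Note (assumption, not verified against every core): AArch64 defines
	 * no generic SCTLR branch-predictor enable; ARMv7's SCTLR.Z bit is
	 * gone, so prediction is either always on or controlled through
	 * IMPLEMENTATION DEFINED registers such as ACTLR_EL3.
	 */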

	/* Flush and invalidate dcache */
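	/*
	 * DCCISW ("data cache clean and invalidate by set/way", from
	 * <arch/cache_helpers.h>) is passed in x0 to select which set/way
	 * operation flush_dcache_all performs, so dirty lines are written
	 * back before they are invalidated.
	 */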
	mov	x0, #DCCISW
	bl	flush_dcache_all

	/* Deactivate MMU (0), Alignment Check (1) and DCache (2) */
	and	x4, x4, # ~(1 << 0) & ~(1 << 1) & ~(1 << 2)
	/* Activate ICache (12) already for speed */
	orr	x4, x4, #(1 << 12)
	msr	sctlr_el3, x4

	/* Invalidate icache and TLB for good measure */
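	/*
	 * The dsb/isb pair below ensures the maintenance operations and the
	 * SCTLR write have completed and are visible before any further
	 * instruction fetches.
	 */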
	ic	iallu
	tlbi	alle3
	dsb	sy
	isb

	ret	x8
ENDPROC(arm_init_caches)

/* Based on u-boot transition.S */
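/*
 * Drop from EL3 to EL2 without changing the execution environment: the
 * stack pointer, vector base and system control register currently in use
 * at EL3 are copied to their EL2 counterparts, then the eret returns to the
 * caller (x30) at EL2h with D/A/I/F masked.
 */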
ENTRY(switch_el3_to_el2)
	mov	x0, #0x5b1	/* Non-secure EL0/EL1 | HVC | 64bit EL2 */
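	/*
	 * 0x5b1 = NS (bit 0) | RES1 bits [5:4] | SMD (bit 7) | HCE (bit 8) |
	 * RW (bit 10): lower ELs are non-secure, SMC is disabled, HVC is
	 * enabled and EL2 runs in AArch64.
	 */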
	msr	scr_el3, x0
	msr	cptr_el3, xzr	/* Disable coprocessor traps to EL3 */
	mov	x0, #0x33ff
	msr	cptr_el2, x0	/* Disable coprocessor traps to EL2 */
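	/*
	 * 0x33ff should only set the CPTR_EL2 RES1 bits ([13:12] and [9:0]);
	 * the trap bits (TFP, TTA, TCPAC) remain clear.
	 */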

	/* Return to the EL2_SP2 mode from EL3 */
	mov	x0, sp
	msr	sp_el2, x0	/* Migrate SP */
	mrs	x0, vbar_el3
	msr	vbar_el2, x0	/* Migrate VBAR */
	mrs	x0, sctlr_el3
	msr	sctlr_el2, x0	/* Migrate SCTLR */
	mov	x0, #0x3c9
	msr	spsr_el3, x0	/* EL2_SP2 | D | A | I | F */
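	/*
	 * 0x3c9: M[3:0] = 0b1001 selects EL2h (EL2 using SP_EL2) and bits
	 * [9:6] mask D, A, I and F on entry to EL2.
	 */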
	msr	elr_el3, x30
	eret
ENDPROC(switch_el3_to_el2)