Aaron Durbin | 0df877a | 2014-07-10 12:40:30 -0500 | [diff] [blame] | 1 | /* |
| 2 | * This file is part of the coreboot project. |
| 3 | * |
| 4 | * Copyright 2014 Google Inc. |
| 5 | * |
| 6 | * This program is free software; you can redistribute it and/or modify |
| 7 | * it under the terms of the GNU General Public License as published by |
| 8 | * the Free Software Foundation; version 2 of the License. |
| 9 | * |
| 10 | * This program is distributed in the hope that it will be useful, |
| 11 | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
| 12 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
| 13 | * GNU General Public License for more details. |
Aaron Durbin | 0df877a | 2014-07-10 12:40:30 -0500 | [diff] [blame] | 14 | */ |
| 15 | |
Furquan Shaikh | ab020f3 | 2014-09-10 12:19:38 -0700 | [diff] [blame] | 16 | /* |
| 17 | * ======================== stage_entry.S ===================================== |
Furquan Shaikh | b3f6ad3 | 2015-10-15 12:15:31 -0700 | [diff] [blame] | 18 | * This file acts as an entry point to the different stages of arm64. They share |
| 19 | * the same process of setting up stacks and jumping to c code. It is important |
| 20 | * to save x25 from corruption as it contains the argument for rmodule. |
Furquan Shaikh | ab020f3 | 2014-09-10 12:19:38 -0700 | [diff] [blame] | 21 | * ============================================================================= |
| 22 | */ |
Aaron Durbin | 0df877a | 2014-07-10 12:40:30 -0500 | [diff] [blame] | 23 | |
| 24 | #include <arch/asm.h> |
Furquan Shaikh | 136657c | 2014-09-09 09:43:08 -0700 | [diff] [blame] | 25 | #define __ASSEMBLY__ |
| 26 | #include <arch/lib_helpers.h> |
Aaron Durbin | 1c65129 | 2014-08-27 12:50:26 -0500 | [diff] [blame] | 27 | |
#define STACK_SZ CONFIG_STACK_SIZE
#define EXCEPTION_STACK_SZ CONFIG_STACK_SIZE

/*
 * The armv8 CPU stack grows down from _estack. A dedicated exception
 * stack (grown down from _estack_exceptions) is provided as well, so
 * that exception entry never depends on the state of the main stack.
 * Both regions live in .bss and are aligned to their own size.
 */
.section .bss, "aw", @nobits

.global _stack
.global _estack
.balign STACK_SZ
_stack:
	.space STACK_SZ
_estack:

.global _stack_exceptions
.global _estack_exceptions
.balign EXCEPTION_STACK_SZ
_stack_exceptions:
	.space EXCEPTION_STACK_SZ
_estack_exceptions:
| 50 | |
/*
 * cpu_get_stack: return the top of the regular stack in x0.
 *
 * The address is fetched from an 8-byte literal (.quad _estack) rather
 * than computed PC-relatively, so the link-time address of _estack is
 * returned no matter where this code happens to execute from.
 * Clobbers: x0 only.
 */
ENTRY(cpu_get_stack)
	ldr	x0, .Lstack_top
	ret
.align 3
.Lstack_top:
	.quad	_estack
ENDPROC(cpu_get_stack)
| 58 | |
/*
 * cpu_get_exception_stack: return the top of the exception stack in x0.
 *
 * Mirrors cpu_get_stack: an absolute 8-byte literal holds the
 * link-time address of _estack_exceptions.
 * Clobbers: x0 only.
 */
ENTRY(cpu_get_exception_stack)
	ldr	x0, .Lexception_stack_top
	ret
.align 3
.Lexception_stack_top:
	.quad	_estack_exceptions
ENDPROC(cpu_get_exception_stack)
| 66 | |
/*
 * Bootstrap the processor into a C environment. That consists of
 * providing a 16-byte aligned stack. The programming environment uses
 * SP_EL0 as its main stack while keeping SP_ELx reserved for
 * exception entry.
 */
/*
 * IMPORTANT: x25 must survive this path untouched because it carries
 * the argument for any rmodule; it is moved back into x0 immediately
 * before jumping to the C entry point.
 */
ENTRY(arm64_c_environment)
	/* Point SP_ELx at the exception stack for this cpu. */
	bl	cpu_get_exception_stack
	msr	SPSel, #1
	isb
	mov	sp, x0

	/* Switch stack pointer selection to SP_EL0 from here on. */
	msr	SPSel, #0
	isb

	/* Point SP_EL0 at the regular (non-exception) stack. */
	bl	cpu_get_stack
	mov	sp, x0

	/* Fetch the entry point by dereferencing c_entry. */
	ldr	x1, .Lc_entry_ptr
	/* Move the rmodule argument back from x25 into x0. */
	mov	x0, x25
	br	x1
.align 3
.Lc_entry_ptr:
	.quad	c_entry
ENDPROC(arm64_c_environment)
| 100 | |
ENTRY(_start)
	/*
	 * Stash the rmodule argument in x25 so it survives the stack
	 * setup done in arm64_c_environment (which clobbers x0).
	 */
	mov	x25, x0
	b	arm64_c_environment
ENDPROC(_start)
Furquan Shaikh | ab020f3 | 2014-09-10 12:19:38 -0700 | [diff] [blame] | 106 | |
/*
 * Program SCTLR for the current exception level so that:
 * - little endian mode is selected (EE, bit 25, cleared),
 * - XN is not enforced (WXN, bit 19, cleared),
 * - the instruction cache is disabled (I, bit 12, cleared),
 * - stack alignment check, data cache, alignment check and the MMU
 *   are all disabled (SA/C/A/M, bits 3:0, cleared).
 * Clobbers x0 and x1 (x1 is scratch for write_current).
 */
.macro setup_sctlr
	read_current x0, sctlr
	bic	x0, x0, #(1 << 25)	/* Little Endian */
	bic	x0, x0, #(1 << 19)	/* XN not enforced */
	bic	x0, x0, #(1 << 12)	/* Disable Instruction Cache */
	bic	x0, x0, #0xf		/* Clear SA, C, A and M */
	write_current sctlr, x0, x1
.endm
| 120 | |
CPU_RESET_ENTRY(arm64_cpu_startup)
	/* SoC/CPU-specific early init, then a known-good SCTLR state. */
	bl	arm64_cpu_early_setup
	setup_sctlr
	b	arm64_c_environment
ENDPROC(arm64_cpu_startup)
Aaron Durbin | 0df877a | 2014-07-10 12:40:30 -0500 | [diff] [blame] | 126 | |
/*
 * stage_entry is a weak symbol so that SoCs/CPUs may provide their own
 * entry point to perform any fixups needed immediately after power-on
 * reset. When no custom entry point is required, this default simply
 * jumps straight to arm64_cpu_startup.
 */
ENTRY_WEAK(stage_entry)
	b	arm64_cpu_startup
ENDPROC(stage_entry)