/* SPDX-License-Identifier: GPL-2.0-only */

/*
 * TZ expects the ARM core to be in 'ARM' mode. However, coreboot seems
 * to be compiled in mixed thumb/arm mode. Hence create a glue function
 * to invoke TZ.
 */

#include <arch/asm.h>

/*
 * Force ARM mode; otherwise this gets assembled with mixed ARM and
 * Thumb instructions. We set up everything and jump to TZBSP using
 * the 'blx' instruction. With 'blx', if the last bit of the
 * destination address is zero, the core switches to ARM mode. Since
 * we are already in ARM mode, no mode switch actually happens here.
 *
 * However, when TZBSP returns, the CPU is still in ARM mode. If the
 * assembler were to insert Thumb instructions between the point of
 * return from TZBSP and the 'bx' instruction, we would be hosed.
 * Hence we force ARM mode.
 *
 * The rest of the code can be compiled in mixed ARM/Thumb mode.
 * Since tz_init_wrapper is forced to be an ARM symbol, callers will
 * use 'blx' to come here, forcing a switch to ARM mode. The wrapper
 * does its job and returns to the Thumb caller.
 */
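/*
 * Illustrative sketch, not part of the original code: a Thumb-mode
 * caller reaching this ARM symbol through interworking would look
 * roughly like the following (the register choice is an assumption):
 *
 *	ldr	r3, =tz_init_wrapper	@ ARM symbol, so bit 0 is clear
 *	blx	r3			@ Thumb -> ARM switch on the call
 *	...				@ resumes here, back in Thumb state,
 *					@ after the wrapper's final 'bx lr'
 */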
.arm
/*
 * int tz_init_wrapper(int, int, void *);
 */
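/*
 * Illustrative sketch, not part of the original code: a C caller would
 * declare and invoke this wrapper roughly as follows (the parameter and
 * variable names here are assumptions, chosen only for illustration):
 *
 *	int tz_init_wrapper(int tz_arg1, int tz_arg2, void *tz_load_addr);
 *
 *	int ret = tz_init_wrapper(arg1, arg2, tz_load_addr);
 */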
ENTRY(tz_init_wrapper)
	.global tz_init_wrapper

	/*
	 * r0 = tz_arg1
	 * r1 = tz_arg2
	 * r2 = tz_load_addr
	 */

	/*
	 * Per the AAPCS:
	 *   r0, r1, r2, r3, r12 can be clobbered
	 *   r4, r5, r6, r7, r8, r9, r10, r11 have to be preserved
	 *
	 * The following code clobbers:
	 *   r0 - setting return value to zero
	 *   r1 - for doing a Thumb return
	 *   r3 - passing 'SP' from the current mode to 'svc' mode
	 *   r4 - to save & restore the CPSR
	 *
	 * Per the AAPCS, save and restore r4; the rest are 'clobberable' :)
	 * The invoked code takes care of saving and restoring the other
	 * preserved registers (i.e. r5 - r11).
	 *
	 * Stack usage:
	 *	SP -> | LR   | (lower address)
	 *	      | r4   |
	 *	      | CPSR |
	 *	      |------|
	 *	      |  .   |
	 *	      |  .   |
	 *	      |  .   | (higher address)
	 */
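	/*
	 * Illustrative note, not part of the original code: with the
	 * 12-byte frame above, the slots used by the code below are
	 *	[sp + 0] = LR	(return address)
	 *	[sp + 4] = r4	(callee-saved scratch)
	 *	[sp + 8] = CPSR	(saved mode/flags)
	 */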

	sub	sp, sp, #12		/* Allocate stack frame */
	str	lr, [sp]		/* Save return address */
	str	r4, [sp, #4]		/* Save r4; it will hold the new CPSR value */

	mov	r3, sp			/* Get current stack pointer */

	mrs	r4, CPSR		/* Save CPSR */
	str	r4, [sp, #8]

	bic	r4, r4, 0x1f		/* Clear mode bits */
	orr	r4, r4, 0x13		/* 'svc' mode */
	msr	cpsr_cxf, r4		/* Switch to Supervisor mode */
	mov	sp, r3			/* Use the same stack as the previous mode */

	blx	r2			/* Jump to TZ in ARM mode */

	nop				/* Back from TZ, still in ARM mode */

	ldr	r4, [sp, #8]		/* Restore CPSR */
	msr	cpsr_cxf, r4

	ldr	r4, [sp, #4]		/* Restore r4 */

	ldr	lr, [sp]		/* Restore the saved return address */
	add	sp, sp, #12		/* Free stack frame */

	bx	lr			/* Return to the Thumb caller */

ENDPROC(tz_init_wrapper)