/*
 * Copyright 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <common.h>

.set .L_MAIR_DEV_nGnRE, 0x04
.set .L_MAIR_MEM_WBWA, 0xff
.set .Lmairval, .L_MAIR_DEV_nGnRE | (.L_MAIR_MEM_WBWA << 8)
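/*
 * The resulting MAIR_EL1 value provides Device-nGnRE memory at attribute index 0 and Normal
 * write-back cacheable memory at attribute index 1, selected per page table entry via AttrIndx.
 */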

/* 4 KiB granule size for TTBR0_EL1. */
.set .L_TCR_TG0_4KB, 0x0 << 14
/* 4 KiB granule size for TTBR1_EL1. */
.set .L_TCR_TG1_4KB, 0x2 << 30
/* Disable translation table walks for TTBR1_EL1, generating a translation fault instead. */
.set .L_TCR_EPD1, 0x1 << 23
/* Translation table walks for TTBR0_EL1 are inner shareable. */
.set .L_TCR_SH_INNER, 0x3 << 12
/*
 * Translation table walks for TTBR0_EL1 are outer write-back read-allocate write-allocate
 * cacheable.
 */
.set .L_TCR_RGN_OWB, 0x1 << 10
/*
 * Translation table walks for TTBR0_EL1 are inner write-back read-allocate write-allocate
 * cacheable.
 */
.set .L_TCR_RGN_IWB, 0x1 << 8
/* The region addressed via TTBR0_EL1 spans 2**39 bytes (512 GiB); T0SZ is the size offset, 64 - 39. */
.set .L_TCR_T0SZ_512, 64 - 39
.set .Ltcrval, .L_TCR_TG0_4KB | .L_TCR_TG1_4KB | .L_TCR_EPD1 | .L_TCR_RGN_OWB
.set .Ltcrval, .Ltcrval | .L_TCR_RGN_IWB | .L_TCR_SH_INNER | .L_TCR_T0SZ_512
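/*
 * Taken together: 4 KiB pages and a 39-bit (512 GiB, three-level) address space with cacheable,
 * inner-shareable table walks via TTBR0_EL1, while TTBR1_EL1 walks are disabled.
 */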

/* Stage 1 instruction access cacheability is unaffected. */
.set .L_SCTLR_ELx_I, 0x1 << 12
/* SP alignment fault if SP is not aligned to a 16-byte boundary. */
.set .L_SCTLR_ELx_SA, 0x1 << 3
/* Stage 1 data access cacheability is unaffected. */
.set .L_SCTLR_ELx_C, 0x1 << 2
/* EL0 and EL1 stage 1 MMU enabled. */
.set .L_SCTLR_ELx_M, 0x1 << 0
/* Privileged Access Never is unchanged on taking an exception to EL1. */
.set .L_SCTLR_EL1_SPAN, 0x1 << 23
/* All writable memory regions are treated as XN (execute-never). */
.set .L_SCTLR_EL1_WXN, 0x1 << 19
/* SETEND instruction disabled at EL0 in AArch32 mode. */
.set .L_SCTLR_EL1_SED, 0x1 << 8
/* Various IT instructions are disabled at EL0 in AArch32 mode. */
.set .L_SCTLR_EL1_ITD, 0x1 << 7
.set .L_SCTLR_EL1_RES1, (0x1 << 11) | (0x1 << 20) | (0x1 << 22) | (0x1 << 28) | (0x1 << 29)
.set .Lsctlrval, .L_SCTLR_ELx_M | .L_SCTLR_ELx_C | .L_SCTLR_ELx_SA | .L_SCTLR_EL1_ITD | .L_SCTLR_EL1_SED
.set .Lsctlrval, .Lsctlrval | .L_SCTLR_ELx_I | .L_SCTLR_EL1_SPAN | .L_SCTLR_EL1_RES1 | .L_SCTLR_EL1_WXN
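/* The combined value enables the MMU, caches and SP alignment checking, and enforces W^X. */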

/* SMC function IDs */
.set .L_SMCCC_VERSION_ID, 0x80000000
.set .L_SMCCC_TRNG_VERSION_ID, 0x84000050
.set .L_SMCCC_TRNG_FEATURES_ID, 0x84000051
.set .L_SMCCC_TRNG_RND64_ID, 0xc4000053

/* SMC function versions */
.set .L_SMCCC_VERSION_1_1, 0x0101
.set .L_SMCCC_TRNG_VERSION_1_0, 0x0100
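/*
 * Note that SMCCC encodes versions with the major revision in bits [30:16] and the minor revision
 * in bits [15:0], so v1.1 is strictly 0x10001. The smaller values above still work as lower bounds
 * because they fit in a cmp immediate and implementations lacking the queried call return a
 * negative NOT_SUPPORTED value, which is checked first.
 */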

/**
 * This macro stores a random value into a register.
 * If a TRNG backend is not present or if an error occurs, reset_or_hang is invoked instead of
 * returning a value.
 * Clobbers x20-x23, which are used to preserve x0-x3 across the SMCCC calls.
 */
.macro rnd_reg reg:req
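    /*
     * Preserve x0-x3, which the SMCCC calls below use for arguments and results; hvc_call is
     * assumed (via common.h) to issue an HVC with the given function ID in w0.
     */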
    mov x20, x0
    mov x21, x1
    mov x22, x2
    mov x23, x3

    /* Verify SMCCC version >= 1.1. */
    hvc_call .L_SMCCC_VERSION_ID
    cmp w0, 0
    b.lt 100f
    cmp w0, .L_SMCCC_VERSION_1_1
    b.lt 100f

    /* Verify TRNG ABI version 1.x. */
    hvc_call .L_SMCCC_TRNG_VERSION_ID
    cmp w0, 0
    b.lt 100f
    cmp w0, .L_SMCCC_TRNG_VERSION_1_0
    b.lt 100f

    /* Call TRNG_FEATURES, ensure TRNG_RND is implemented. */
    mov_i x1, .L_SMCCC_TRNG_RND64_ID
    hvc_call .L_SMCCC_TRNG_FEATURES_ID
    cmp w0, 0
    b.lt 100f

    /* Call TRNG_RND, request 64 bits of entropy. */
    mov x1, #64
    hvc_call .L_SMCCC_TRNG_RND64_ID
    cmp x0, 0
    b.lt 100f

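    /* TRNG_RND64 returns the entropy in x1 (bits [191:128]), x2 ([127:64]) and x3 ([63:0]). */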
    mov \reg, x3
    b 101f

100:
    reset_or_hang
101:
    mov x0, x20
    mov x1, x21
    mov x2, x22
    mov x3, x23
.endm

/**
 * This is a generic entry point for an image. It carries out the operations required to prepare the
 * loaded image to be run. Specifically, it zeroes the bss section using registers x25 and above,
 * prepares the stack, enables floating point, and sets up the exception vector. It preserves x0-x3
 * for the Rust entry point, as these may contain boot parameters.
 */
.section .init.entry, "ax"
.global entry
entry:
    /* Load and apply the memory management configuration, ready to enable MMU and caches. */

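    /*
     * Install the panic vector table first so that any exception taken during this early setup
     * leads to a well-defined panic path rather than a jump through an uninitialised VBAR_EL1.
     */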
    adr x30, vector_table_panic
    msr vbar_el1, x30

    /*
     * Our load address is set by the host so validate it before proceeding: compare the runtime
     * address of entry (adr) with its link-time address (mov_i) and reset or hang on a mismatch,
     * as the image must run from the address it was linked for.
     */
    adr x30, entry
    mov_i x29, entry
    cmp x29, x30
    b.eq 1f
    reset_or_hang
1:

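    /*
     * idmap is expected to be the image's statically-built, page-aligned identity-mapping table;
     * it becomes the active stage 1 translation table once the MMU is enabled.
     */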
    adrp x30, idmap
    msr ttbr0_el1, x30

    mov_i x30, .Lmairval
    msr mair_el1, x30

    mov_i x30, .Ltcrval
    /* Copy the supported PA range into TCR_EL1.IPS. */
    mrs x29, id_aa64mmfr0_el1
    bfi x30, x29, #32, #4
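    /*
     * ID_AA64MMFR0_EL1.PARange is bits [3:0] and TCR_EL1.IPS sits at bits [34:32]; all defined
     * PARange values fit in the 4 bits copied, leaving the RES0 bit 35 clear.
     */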

    msr tcr_el1, x30

    mov_i x30, .Lsctlrval

    /*
     * Ensure everything before this point has completed, then invalidate any potentially stale
     * local TLB entries before they start being used.
     */
    isb
    tlbi vmalle1
    ic iallu
    dsb nsh
    isb

    /*
     * Configure sctlr_el1 to enable the MMU and caches, and don't proceed until this has
     * completed.
     */
    msr sctlr_el1, x30
    isb

    /* Disable trapping floating point access in EL1. */
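    /* CPACR_EL1.FPEN (bits [21:20]) = 0b11 stops FP/SIMD accesses from trapping at EL0 and EL1. */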
    mrs x30, cpacr_el1
    orr x30, x30, #(0x3 << 20)
    msr cpacr_el1, x30
    isb

    /* Zero out the bss section. */
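    /* bss_begin and bss_end are assumed to be 16-byte aligned, as stp clears 16 bytes at a time. */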
    adr_l x29, bss_begin
    adr_l x30, bss_end
0:  cmp x29, x30
    b.hs 1f
    stp xzr, xzr, [x29], #16
    b 0b

1:  /* Copy the data section. */
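    /*
     * data_lma is presumably the load address (LMA) of the data section, while data_begin and
     * data_end delimit where it must live at runtime (its VMA); each iteration copies 32 bytes
     * through q0 and q1.
     */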
    adr_l x28, data_begin
    adr_l x29, data_end
    adr_l x30, data_lma
2:  cmp x28, x29
    b.ge 3f
    ldp q0, q1, [x30], #32
    stp q0, q1, [x28], #32
    b 2b

3:  /* Prepare the exception handler stack (SP_EL1). */
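    /* spsel selects the stack pointer that "sp" names: #1 selects SP_ELx, #0 selects SP_EL0. */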
    adr_l x30, init_eh_stack_pointer
    msr spsel, #1
    mov sp, x30

    /* Prepare the main thread stack (SP_EL0). */
    adr_l x30, init_stack_pointer
    msr spsel, #0
    mov sp, x30

    /* Set up the exception vector. */
    adr x30, vector_table_el1
    msr vbar_el1, x30

    /*
     * Set up Bionic-compatible thread-local storage.
     *
     * Note that TPIDR_EL0 can't be configured from rust_entry because the compiler will
     * dereference it during function entry to access __stack_chk_guard and Rust doesn't support
     * LLVM's __attribute__((no_stack_protector)).
     */
    adr_l x30, __bionic_tls
    msr tpidr_el0, x30

    /* Randomize the stack protector. */
    rnd_reg x29
    adr_l x30, __stack_chk_guard
    str x29, [x30]

    /* Zero the first (lowest-addressed) byte of the stack guard so it acts as a string terminator. */
    strb wzr, [x30]

    /* Call into Rust code. */
    bl rust_entry

    /* Loop forever waiting for interrupts. */
4:  wfi
    b 4b