Andrew Walbran | 15068b0 | 2022-03-22 15:57:34 +0000 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2022 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * https://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
Pierre-Clément Tosi | e328f4a | 2022-10-27 11:43:10 +0100 | [diff] [blame] | 17 | #include <common.h> |
Andrew Walbran | 267f6c1 | 2022-03-24 11:26:36 +0000 | [diff] [blame] | 18 | |
| 19 | .set .L_MAIR_DEV_nGnRE, 0x04 |
| 20 | .set .L_MAIR_MEM_WBWA, 0xff |
| 21 | .set .Lmairval, .L_MAIR_DEV_nGnRE | (.L_MAIR_MEM_WBWA << 8) |
| | /* Attr0 (index 0) = Device-nGnRE, Attr1 (index 1) = Normal write-back write-allocate. */ |
| 22 | |
| 23 | /* 4 KiB granule size for TTBR0_EL1. */ |
| 24 | .set .L_TCR_TG0_4KB, 0x0 << 14 |
| 25 | /* 4 KiB granule size for TTBR1_EL1. */ |
| 26 | .set .L_TCR_TG1_4KB, 0x2 << 30 |
| 27 | /* Disable translation table walk for TTBR1_EL1, generating a translation fault instead. */ |
| 28 | .set .L_TCR_EPD1, 0x1 << 23 |
| 29 | /* Translation table walks for TTBR0_EL1 are inner sharable. */ |
| 30 | .set .L_TCR_SH_INNER, 0x3 << 12 |
| 31 | /* |
| 32 | * Translation table walks for TTBR0_EL1 are outer write-back read-allocate write-allocate |
| 33 | * cacheable. |
| 34 | */ |
| 35 | .set .L_TCR_RGN_OWB, 0x1 << 10 |
| 36 | /* |
| 37 | * Translation table walks for TTBR0_EL1 are inner write-back read-allocate write-allocate |
| 38 | * cacheable. |
| 39 | */ |
| 40 | .set .L_TCR_RGN_IWB, 0x1 << 8 |
| 41 | /* Size offset for TTBR0_EL1 is 2**39 bytes (512 GiB). */ |
| 42 | .set .L_TCR_T0SZ_512, 64 - 39 |
| 43 | .set .Ltcrval, .L_TCR_TG0_4KB | .L_TCR_TG1_4KB | .L_TCR_EPD1 | .L_TCR_RGN_OWB |
| 44 | .set .Ltcrval, .Ltcrval | .L_TCR_RGN_IWB | .L_TCR_SH_INNER | .L_TCR_T0SZ_512 |
| 45 | |
| 46 | /* Stage 1 instruction access cacheability is unaffected. */ |
| 47 | .set .L_SCTLR_ELx_I, 0x1 << 12 |
| 48 | /* SP alignment fault if SP is not aligned to a 16 byte boundary. */ |
| 49 | .set .L_SCTLR_ELx_SA, 0x1 << 3 |
| 50 | /* Stage 1 data access cacheability is unaffected. */ |
| 51 | .set .L_SCTLR_ELx_C, 0x1 << 2 |
| 52 | /* EL0 and EL1 stage 1 MMU enabled. */ |
| 53 | .set .L_SCTLR_ELx_M, 0x1 << 0 |
| 54 | /* Privileged Access Never is unchanged on taking an exception to EL1. */ |
| 55 | .set .L_SCTLR_EL1_SPAN, 0x1 << 23 |
Andrew Walbran | a0ecabd | 2022-04-11 14:26:18 +0000 | [diff] [blame] | 56 | /* All writable memory regions are treated as XN. */ |
| 57 | .set .L_SCTLR_EL1_WXN, 0x1 << 19 |
Andrew Walbran | 267f6c1 | 2022-03-24 11:26:36 +0000 | [diff] [blame] | 58 | /* SETEND instruction disabled at EL0 in aarch32 mode. */ |
| 59 | .set .L_SCTLR_EL1_SED, 0x1 << 8 |
| 60 | /* Various IT instructions are disabled at EL0 in aarch32 mode. */ |
| 61 | .set .L_SCTLR_EL1_ITD, 0x1 << 7 |
| | /* Reserved bits that must be written as 1 on this configuration. */ |
| 62 | .set .L_SCTLR_EL1_RES1, (0x1 << 11) | (0x1 << 20) | (0x1 << 22) | (0x1 << 28) | (0x1 << 29) |
| 63 | .set .Lsctlrval, .L_SCTLR_ELx_M | .L_SCTLR_ELx_C | .L_SCTLR_ELx_SA | .L_SCTLR_EL1_ITD | .L_SCTLR_EL1_SED |
Andrew Walbran | a0ecabd | 2022-04-11 14:26:18 +0000 | [diff] [blame] | 64 | .set .Lsctlrval, .Lsctlrval | .L_SCTLR_ELx_I | .L_SCTLR_EL1_SPAN | .L_SCTLR_EL1_RES1 | .L_SCTLR_EL1_WXN |
| 65 | |
Jakob Vukalovic | 586bb7c | 2023-01-22 19:38:39 +0000 | [diff] [blame] | 66 | /* SMC function IDs */ |
| 67 | .set .L_SMCCC_VERSION_ID, 0x80000000 |
| 68 | .set .L_SMCCC_TRNG_VERSION_ID, 0x84000050 |
| 69 | .set .L_SMCCC_TRNG_FEATURES_ID, 0x84000051 |
| | /* NOTE(review): RND64 uses the SMC64 convention (bit 30 set), unlike the SMC32 IDs above. */ |
| 70 | .set .L_SMCCC_TRNG_RND64_ID, 0xc4000053 |
| 71 | |
| 72 | /* SMC function versions */ |
| 73 | .set .L_SMCCC_VERSION_1_1, 0x0101 |
| 74 | .set .L_SMCCC_TRNG_VERSION_1_0, 0x0100 |
| 75 | |
David Brazdil | a51c6f0 | 2022-10-12 09:51:48 +0000 | [diff] [blame] | 76 | /* Bionic-compatible stack protector */ |
| 77 | .section .data.stack_protector, "aw" |
| | /* TLS base; tpidr_el0 is pointed here during entry. The 40 zero bytes place |
| | * __stack_chk_guard at offset 40 from the TLS base — presumably matching Bionic's |
| | * stack-guard TLS slot (5 slots * 8 bytes); confirm against the Bionic TLS layout. */ |
| 78 | __bionic_tls: |
| 79 | .zero 40 |
| 80 | .global __stack_chk_guard |
| | /* Canary value; initially 0, randomized at boot by rnd_reg in the entry code. */ |
| 81 | __stack_chk_guard: |
Jakob Vukalovic | 586bb7c | 2023-01-22 19:38:39 +0000 | [diff] [blame] | 82 | .quad 0 |
| 83 | |
| 84 | /** |
| 85 | * This macro stores a random value into a register. |
| 86 | * If a TRNG backend is not present, or if any of the SMCCC/TRNG calls fails, it does NOT |
| | * leave the register unchanged: the failure path (label 100 below) invokes reset_or_hang |
| | * (from common.h) rather than continuing with an unrandomized value. |
| | * |
| | * Preserves x0-x3 (restored on the success path); clobbers x20-x23, which are used as |
| | * scratch to save x0-x3 across the calls. |
| 87 | */ |
| 88 | .macro rnd_reg reg:req |
| | /* Save x0-x3: hvc_call (common.h) presumably clobbers them — confirm its definition. */ |
| 89 | mov x20, x0 |
| 90 | mov x21, x1 |
| 91 | mov x22, x2 |
| 92 | mov x23, x3 |
| 93 | |
| 94 | /* Verify SMCCC version >=1.1 */ |
| 95 | hvc_call .L_SMCCC_VERSION_ID |
| | /* Negative w0 is an SMCCC error code; otherwise w0 is the version. */ |
| 96 | cmp w0, 0 |
| 97 | b.lt 100f |
| 98 | cmp w0, .L_SMCCC_VERSION_1_1 |
| 99 | b.lt 100f |
| 100 | |
| 101 | /* Verify TRNG ABI version 1.x */ |
| 102 | hvc_call .L_SMCCC_TRNG_VERSION_ID |
| 103 | cmp w0, 0 |
| 104 | b.lt 100f |
| 105 | cmp w0, .L_SMCCC_TRNG_VERSION_1_0 |
| 106 | b.lt 100f |
| 107 | |
| 108 | /* Call TRNG_FEATURES, ensure TRNG_RND is implemented */ |
| 109 | mov_i x1, .L_SMCCC_TRNG_RND64_ID |
| 110 | hvc_call .L_SMCCC_TRNG_FEATURES_ID |
| 111 | cmp w0, 0 |
| 112 | b.lt 100f |
| 113 | |
| 114 | /* Call TRNG_RND, request 64 bits of entropy */ |
| 115 | mov x1, #64 |
| 116 | hvc_call .L_SMCCC_TRNG_RND64_ID |
| 117 | cmp x0, 0 |
| 118 | b.lt 100f |
| 119 | |
| | /* x3 carries the requested entropy bits — NOTE(review): per the SMCCC TRNG ABI the |
| | * least-significant entropy words are in x3; confirm against the spec (DEN0098). */ |
| 120 | mov \reg, x3 |
| 121 | b 101f |
| 122 | |
| | /* Failure path: do not continue with a weak/unset guard value. */ |
| 123 | 100: |
| 124 | reset_or_hang |
| | /* Success path: restore the caller's x0-x3. */ |
| 125 | 101: |
| 126 | mov x0, x20 |
| 127 | mov x1, x21 |
| 128 | mov x2, x22 |
| 129 | mov x3, x23 |
| 130 | .endm |
David Brazdil | a51c6f0 | 2022-10-12 09:51:48 +0000 | [diff] [blame] | 131 | |
Andrew Walbran | 15068b0 | 2022-03-22 15:57:34 +0000 | [diff] [blame] | 132 | /** |
Andrew Walbran | e03395a | 2022-04-29 15:15:49 +0000 | [diff] [blame] | 133 | * This is a generic entry point for an image. It carries out the operations required to prepare the |
| 134 | * loaded image to be run. Specifically, it zeroes the bss section using registers x25 and above, |
| 135 | * prepares the stack, enables floating point, and sets up the exception vector. It preserves x0-x3 |
| 136 | * for the Rust entry point, as these may contain boot parameters. |
Andrew Walbran | 15068b0 | 2022-03-22 15:57:34 +0000 | [diff] [blame] | 137 | */ |
| 138 | .section .init.entry, "ax" |
| 139 | .global entry |
| 140 | entry: |
Andrew Walbran | e03395a | 2022-04-29 15:15:49 +0000 | [diff] [blame] | 141 | /* Load and apply the memory management configuration, ready to enable MMU and caches. */ |
Andrew Walbran | 267f6c1 | 2022-03-24 11:26:36 +0000 | [diff] [blame] | 142 | |
| | /* Install a temporary vector table so any early fault is caught before full setup. |
| | * NOTE(review): vector_table_panic is defined elsewhere in the project. */ |
Pierre-Clément Tosi | d40ff91 | 2022-06-30 16:11:33 +0100 | [diff] [blame] | 143 | adr x30, vector_table_panic |
| 144 | msr vbar_el1, x30 |
| 145 | |
Pierre-Clément Tosi | 2cba973 | 2022-10-26 20:38:51 +0100 | [diff] [blame] | 146 | /* |
| 147 | * Our load address is set by the host so validate it before proceeding. |
| | * adr gives the actual PC-relative address of `entry`, mov_i the link-time expected one; |
| | * a mismatch means the image was loaded at the wrong address, so reset or hang. |
| 148 | */ |
| 149 | adr x30, entry |
| 150 | mov_i x29, entry |
| 151 | cmp x29, x30 |
| 152 | b.eq 1f |
| 153 | reset_or_hang |
| 154 | 1: |
| 155 | |
| | /* Use the statically-built page table (named idmap — presumably an identity map). */ |
Andrew Walbran | e03395a | 2022-04-29 15:15:49 +0000 | [diff] [blame] | 156 | adrp x30, idmap |
| 157 | msr ttbr0_el1, x30 |
Andrew Walbran | 267f6c1 | 2022-03-24 11:26:36 +0000 | [diff] [blame] | 158 | |
Andrew Walbran | e03395a | 2022-04-29 15:15:49 +0000 | [diff] [blame] | 159 | mov_i x30, .Lmairval |
| 160 | msr mair_el1, x30 |
| 161 | |
| 162 | mov_i x30, .Ltcrval |
Andrew Walbran | 267f6c1 | 2022-03-24 11:26:36 +0000 | [diff] [blame] | 163 | /* Copy the supported PA range into TCR_EL1.IPS. */ |
Andrew Walbran | e03395a | 2022-04-29 15:15:49 +0000 | [diff] [blame] | 164 | mrs x29, id_aa64mmfr0_el1 |
| | /* ID_AA64MMFR0_EL1.PARange is bits [3:0]; TCR_EL1.IPS is bits [34:32] (4-bit field here). */ |
| 165 | bfi x30, x29, #32, #4 |
Andrew Walbran | 267f6c1 | 2022-03-24 11:26:36 +0000 | [diff] [blame] | 166 | |
Andrew Walbran | e03395a | 2022-04-29 15:15:49 +0000 | [diff] [blame] | 167 | msr tcr_el1, x30 |
| 168 | |
| 169 | mov_i x30, .Lsctlrval |
Andrew Walbran | 267f6c1 | 2022-03-24 11:26:36 +0000 | [diff] [blame] | 170 | |
| 171 | /* |
| 172 | * Ensure everything before this point has completed, then invalidate any potentially stale |
| 173 | * local TLB entries before they start being used. |
| 174 | */ |
| 175 | isb |
| 176 | tlbi vmalle1 |
| 177 | ic iallu |
| 178 | dsb nsh |
| 179 | isb |
| 180 | |
| 181 | /* |
Andrew Walbran | e03395a | 2022-04-29 15:15:49 +0000 | [diff] [blame] | 182 | * Configure sctlr_el1 to enable MMU and cache and don't proceed until this has completed. |
Andrew Walbran | 267f6c1 | 2022-03-24 11:26:36 +0000 | [diff] [blame] | 183 | */ |
Andrew Walbran | e03395a | 2022-04-29 15:15:49 +0000 | [diff] [blame] | 184 | msr sctlr_el1, x30 |
Andrew Walbran | 267f6c1 | 2022-03-24 11:26:36 +0000 | [diff] [blame] | 185 | isb |
| 186 | |
Andrew Walbran | 15068b0 | 2022-03-22 15:57:34 +0000 | [diff] [blame] | 187 | /* Disable trapping floating point access in EL1. */ |
| 188 | mrs x30, cpacr_el1 |
| | /* CPACR_EL1.FPEN = 0b11: no trapping of FP/SIMD at EL0 or EL1. */ |
| 189 | orr x30, x30, #(0x3 << 20) |
| 190 | msr cpacr_el1, x30 |
| 191 | isb |
| 192 | |
| 193 | /* Zero out the bss section. */ |
| | /* Stores 16 bytes per iteration; assumes bss_begin/bss_end are 16-byte aligned — |
| | * presumably guaranteed by the linker script, TODO confirm. */ |
| 194 | adr_l x29, bss_begin |
| 195 | adr_l x30, bss_end |
| 196 | 0: cmp x29, x30 |
| 197 | b.hs 1f |
| 198 | stp xzr, xzr, [x29], #16 |
| 199 | b 0b |
| 200 | |
Andrew Walbran | a0ecabd | 2022-04-11 14:26:18 +0000 | [diff] [blame] | 201 | 1: /* Copy the data section. */ |
| | /* Copies 32 bytes per iteration from the load address (data_lma) to the link address; |
| | * assumes the data section size is a multiple of 32 and suitably aligned — TODO confirm |
| | * in the linker script. NOTE(review): b.ge is a signed compare while the bss loop above |
| | * uses unsigned b.hs; harmless while addresses stay below 2^63, but inconsistent. */ |
| 202 | adr_l x28, data_begin |
| 203 | adr_l x29, data_end |
| 204 | adr_l x30, data_lma |
| 205 | 2: cmp x28, x29 |
| 206 | b.ge 3f |
| 207 | ldp q0, q1, [x30], #32 |
| 208 | stp q0, q1, [x28], #32 |
| 209 | b 2b |
| 210 | |
| 211 | 3: /* Prepare the stack. */ |
| 212 | adr_l x30, boot_stack_end |
Andrew Walbran | 15068b0 | 2022-03-22 15:57:34 +0000 | [diff] [blame] | 213 | mov sp, x30 |
| 214 | |
Andrew Walbran | dfb7337 | 2022-04-21 10:52:27 +0000 | [diff] [blame] | 215 | /* Set up exception vector. */ |
| 216 | adr x30, vector_table_el1 |
| 217 | msr vbar_el1, x30 |
| 218 | |
David Brazdil | a51c6f0 | 2022-10-12 09:51:48 +0000 | [diff] [blame] | 219 | /* Set up Bionic-compatible thread-local storage. */ |
| 220 | adr_l x30, __bionic_tls |
| 221 | msr tpidr_el0, x30 |
| 222 | |
Jakob Vukalovic | 586bb7c | 2023-01-22 19:38:39 +0000 | [diff] [blame] | 223 | /* Randomize stack protector. */ |
| 224 | rnd_reg x29 |
| 225 | adr_l x30, __stack_chk_guard |
| 226 | str x29, [x30] |
| 227 | |
Jakob Vukalovic | f8322ea | 2023-02-09 18:39:54 +0000 | [diff] [blame] | 228 | /* Write a null byte to the first (lowest-address) byte of the stack guard to act as a string terminator. */ |
| 229 | strb wzr, [x30] |
| 230 | |
Andrew Walbran | 15068b0 | 2022-03-22 15:57:34 +0000 | [diff] [blame] | 231 | /* Call into Rust code. */ |
Andrew Walbran | b996b4a | 2022-04-22 15:15:41 +0000 | [diff] [blame] | 232 | bl rust_entry |
Andrew Walbran | 15068b0 | 2022-03-22 15:57:34 +0000 | [diff] [blame] | 233 | |
| 234 | /* Loop forever waiting for interrupts if rust_entry ever returns. */ |
Andrew Walbran | a0ecabd | 2022-04-11 14:26:18 +0000 | [diff] [blame] | 235 | 4: wfi |
| 236 | b 4b |
Jakob Vukalovic | 586bb7c | 2023-01-22 19:38:39 +0000 | [diff] [blame] | 237 | |