/*
 * Copyright 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <common.h>

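/*
 * MAIR_EL1 holds one attribute byte per index: index 0 is Device-nGnRE memory (0x04) and
 * index 1 is Normal memory, inner and outer write-back non-transient, read/write-allocate
 * (0xff).
 */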
.set .L_MAIR_DEV_nGnRE, 0x04
.set .L_MAIR_MEM_WBWA, 0xff
.set .Lmairval, .L_MAIR_DEV_nGnRE | (.L_MAIR_MEM_WBWA << 8)

/* 4 KiB granule size for TTBR0_EL1. */
.set .L_TCR_TG0_4KB, 0x0 << 14
/* 4 KiB granule size for TTBR1_EL1. */
.set .L_TCR_TG1_4KB, 0x2 << 30
/* Disable translation table walk for TTBR1_EL1, generating a translation fault instead. */
.set .L_TCR_EPD1, 0x1 << 23
/* Translation table walks for TTBR0_EL1 are inner shareable. */
.set .L_TCR_SH_INNER, 0x3 << 12
/*
 * Translation table walks for TTBR0_EL1 are outer write-back read-allocate write-allocate
 * cacheable.
 */
.set .L_TCR_RGN_OWB, 0x1 << 10
/*
 * Translation table walks for TTBR0_EL1 are inner write-back read-allocate write-allocate
 * cacheable.
 */
.set .L_TCR_RGN_IWB, 0x1 << 8
/* The region addressed by TTBR0_EL1 spans 2**39 bytes (512 GiB), i.e. T0SZ = 64 - 39 = 25. */
.set .L_TCR_T0SZ_512, 64 - 39
.set .Ltcrval, .L_TCR_TG0_4KB | .L_TCR_TG1_4KB | .L_TCR_EPD1 | .L_TCR_RGN_OWB
.set .Ltcrval, .Ltcrval | .L_TCR_RGN_IWB | .L_TCR_SH_INNER | .L_TCR_T0SZ_512

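/*
 * Note that .Ltcrval leaves TCR_EL1.IPS as zero; the supported PA range is copied in from
 * ID_AA64MMFR0_EL1 by the entry code below, before TCR_EL1 is written.
 */
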
/* Stage 1 instruction access cacheability is unaffected. */
.set .L_SCTLR_ELx_I, 0x1 << 12
/* SP alignment fault if SP is not aligned to a 16 byte boundary. */
.set .L_SCTLR_ELx_SA, 0x1 << 3
/* Stage 1 data access cacheability is unaffected. */
.set .L_SCTLR_ELx_C, 0x1 << 2
/* EL0 and EL1 stage 1 MMU enabled. */
.set .L_SCTLR_ELx_M, 0x1 << 0
/* Privileged Access Never is unchanged on taking an exception to EL1. */
.set .L_SCTLR_EL1_SPAN, 0x1 << 23
/* All writable memory regions are treated as XN. */
.set .L_SCTLR_EL1_WXN, 0x1 << 19
/* SETEND instruction disabled at EL0 in aarch32 mode. */
.set .L_SCTLR_EL1_SED, 0x1 << 8
/* Various IT instructions are disabled at EL0 in aarch32 mode. */
.set .L_SCTLR_EL1_ITD, 0x1 << 7
.set .L_SCTLR_EL1_RES1, (0x1 << 11) | (0x1 << 20) | (0x1 << 22) | (0x1 << 28) | (0x1 << 29)
.set .Lsctlrval, .L_SCTLR_ELx_M | .L_SCTLR_ELx_C | .L_SCTLR_ELx_SA | .L_SCTLR_EL1_ITD | .L_SCTLR_EL1_SED
.set .Lsctlrval, .Lsctlrval | .L_SCTLR_ELx_I | .L_SCTLR_EL1_SPAN | .L_SCTLR_EL1_RES1 | .L_SCTLR_EL1_WXN
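/*
 * Because WXN is set, any region that is writable at EL1 is implicitly execute-never, so
 * code must live in read-only mappings.
 */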

/* SMC function IDs */
.set .L_SMCCC_VERSION_ID, 0x80000000
.set .L_SMCCC_TRNG_VERSION_ID, 0x84000050
.set .L_SMCCC_TRNG_FEATURES_ID, 0x84000051
.set .L_SMCCC_TRNG_RND64_ID, 0xc4000053

/* SMC function versions */
.set .L_SMCCC_VERSION_1_1, 0x0101
.set .L_SMCCC_TRNG_VERSION_1_0, 0x0100

/* Bionic-compatible stack protector */
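/*
 * tpidr_el0 is pointed at __bionic_tls by the entry code below. The 40 zero bytes stand in
 * for Bionic TLS slots 0-4 so that __stack_chk_guard ends up at slot 5 (tpidr_el0 + 40),
 * which is where Bionic-style stack-protector code expects to find the guard.
 */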
.section .data.stack_protector, "aw"
__bionic_tls:
    .zero 40
.global __stack_chk_guard
__stack_chk_guard:
    .quad 0

/**
 * This macro stores a random value into a register.
 * If a TRNG backend is not present or any of the SMCCC calls fails, it resets or hangs via
 * reset_or_hang rather than leaving a predictable value in the register.
 */
.macro rnd_reg reg:req
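    /*
     * x0-x3 may carry boot arguments that must reach rust_entry untouched, but they are also
     * the SMCCC parameter/result registers, so they are stashed in x20-x23 around the HVC
     * calls. hvc_call is assumed (via common.h) to place the function ID in w0, issue the
     * HVC and leave the result, including the status code, in x0 and up.
     */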
    mov x20, x0
    mov x21, x1
    mov x22, x2
    mov x23, x3

    /* Verify SMCCC version >=1.1 */
    hvc_call .L_SMCCC_VERSION_ID
    cmp w0, 0
    b.lt 100f
    cmp w0, .L_SMCCC_VERSION_1_1
    b.lt 100f

    /* Verify TRNG ABI version 1.x */
    hvc_call .L_SMCCC_TRNG_VERSION_ID
    cmp w0, 0
    b.lt 100f
    cmp w0, .L_SMCCC_TRNG_VERSION_1_0
    b.lt 100f

    /* Call TRNG_FEATURES, ensure TRNG_RND is implemented */
    mov_i x1, .L_SMCCC_TRNG_RND64_ID
    hvc_call .L_SMCCC_TRNG_FEATURES_ID
    cmp w0, 0
    b.lt 100f

    /* Call TRNG_RND, request 64 bits of entropy */
    mov x1, #64
    hvc_call .L_SMCCC_TRNG_RND64_ID
    cmp x0, 0
    b.lt 100f

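    /*
     * TRNG_RND64 returns entropy in x1-x3 with the least-significant bits in x3, so the
     * 64 bits requested above are all in x3.
     */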
    mov \reg, x3
    b 101f

100:
    reset_or_hang
101:
    mov x0, x20
    mov x1, x21
    mov x2, x22
    mov x3, x23
.endm

/**
 * This is a generic entry point for an image. It carries out the operations required to prepare
 * the loaded image to be run. Specifically, it zeroes the bss section and copies the data section
 * to its run-time address using registers x25 and above, prepares the stacks, enables floating
 * point, and sets up the exception vector. It preserves x0-x3 for the Rust entry point, as these
 * may contain boot parameters.
 */
.section .init.entry, "ax"
.global entry
entry:
    /*
     * Install an early exception vector, validate our load address, then load and apply the
     * memory management configuration, ready to enable the MMU and caches.
     */

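    /*
     * Any exception taken before the full vector table is installed further down is routed
     * through vector_table_panic rather than an uninitialised VBAR_EL1.
     */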
    adr x30, vector_table_panic
    msr vbar_el1, x30

    /*
     * Our load address is set by the host so validate it before proceeding.
     */
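    /*
     * adr yields the PC-relative (run-time) address of entry while mov_i materialises its
     * absolute link-time address; the image is not relocatable, so if the two differ the only
     * safe option is to reset or hang.
     */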
    adr x30, entry
    mov_i x29, entry
    cmp x29, x30
    b.eq 1f
    reset_or_hang
1:

    adrp x30, idmap
    msr ttbr0_el1, x30

    mov_i x30, .Lmairval
    msr mair_el1, x30

    mov_i x30, .Ltcrval
    /* Copy the supported PA range (ID_AA64MMFR0_EL1.PARange) into TCR_EL1.IPS, at bit 32. */
    mrs x29, id_aa64mmfr0_el1
    bfi x30, x29, #32, #4

    msr tcr_el1, x30

    mov_i x30, .Lsctlrval

    /*
     * Ensure everything before this point has completed, then invalidate any potentially stale
     * local TLB entries before they start being used.
     */
    isb
    tlbi vmalle1
    ic iallu
    dsb nsh
    isb

    /*
     * Configure sctlr_el1 to enable MMU and cache and don't proceed until this has completed.
     */
    msr sctlr_el1, x30
    isb

    /* Disable trapping floating point access in EL1 (CPACR_EL1.FPEN = 0b11). */
    mrs x30, cpacr_el1
    orr x30, x30, #(0x3 << 20)
    msr cpacr_el1, x30
    isb

    /* Zero out the bss section. */
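    /*
     * The zeroing runs 16 bytes at a time, so bss_begin and bss_end are assumed to be 16-byte
     * aligned (something the linker script would normally guarantee).
     */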
    adr_l x29, bss_begin
    adr_l x30, bss_end
0:  cmp x29, x30
    b.hs 1f
    stp xzr, xzr, [x29], #16
    b 0b

1:  /* Copy the data section. */
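    /*
     * The copy moves 32 bytes per iteration through q0/q1, which requires the FP/SIMD access
     * enabled above; data_begin, data_end and data_lma are assumed to be suitably aligned and
     * sized for that by the linker script.
     */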
    adr_l x28, data_begin
    adr_l x29, data_end
    adr_l x30, data_lma
2:  cmp x28, x29
    b.ge 3f
    ldp q0, q1, [x30], #32
    stp q0, q1, [x28], #32
    b 2b

3:  /* Prepare the exception handler stack (SP_EL1). */
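    /*
     * spsel selects which stack pointer the sp alias refers to: 1 selects SP_EL1, used here
     * for exception handling, and 0 selects SP_EL0, used as the main thread stack.
     */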
    adr_l x30, init_eh_stack_pointer
    msr spsel, #1
    mov sp, x30

    /* Prepare the main thread stack (SP_EL0). */
    adr_l x30, init_stack_pointer
    msr spsel, #0
    mov sp, x30

    /* Set up exception vector. */
    adr x30, vector_table_el1
    msr vbar_el1, x30

    /* Set up Bionic-compatible thread-local storage. */
    adr_l x30, __bionic_tls
    msr tpidr_el0, x30

    /* Randomize stack protector. */
    rnd_reg x29
    adr_l x30, __stack_chk_guard
    str x29, [x30]

    /* Write a null byte to the lowest byte of the stack guard to act as a string terminator. */
    strb wzr, [x30]

    /* Call into Rust code. */
    bl rust_entry

    /* Loop forever waiting for interrupts. */
4:  wfi
    b 4b