author    | Dimitris Papastamos <dimitris.papastamos@arm.com> | 2018-05-29 09:28:05 +0100
committer | Anson Huang <Anson.Huang@nxp.com>                 | 2018-06-21 13:29:21 +0800
commit    | 770f853dcb47ad856c060ffc6fefd626ae40e52c (patch)
tree      | 89e973cc90a4341ebdec51f4e26805fa1a81319e /lib/cpus/aarch64/wa_cve_2017_5715_mmu.S
parent    | 959392911781f3bd4263a01d703097998d0aaa0f (diff)
Merge pull request #1392 from dp-arm/dp/cve_2018_3639
Implement workaround for CVE-2018-3639 on Cortex A57/A72/A73 and A75
Conflicts:
services/arm_arch_svc/arm_arch_svc_setup.c
Diffstat (limited to 'lib/cpus/aarch64/wa_cve_2017_5715_mmu.S')
-rw-r--r-- | lib/cpus/aarch64/wa_cve_2017_5715_mmu.S | 148
1 file changed, 148 insertions, 0 deletions
diff --git a/lib/cpus/aarch64/wa_cve_2017_5715_mmu.S b/lib/cpus/aarch64/wa_cve_2017_5715_mmu.S
new file mode 100644
index 00000000..039e373c
--- /dev/null
+++ b/lib/cpus/aarch64/wa_cve_2017_5715_mmu.S
@@ -0,0 +1,148 @@
+/*
+ * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#include <arch.h>
+#include <arm_arch_svc.h>
+#include <asm_macros.S>
+#include <context.h>
+
+	.globl	wa_cve_2017_5715_mmu_vbar
+
+#define ESR_EL3_A64_SMC0	0x5e000000
+
+vector_base wa_cve_2017_5715_mmu_vbar
+
+	.macro	apply_cve_2017_5715_wa _is_sync_exception
+	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
+	mrs	x1, sctlr_el3
+	/* Disable MMU */
+	bic	x1, x1, #SCTLR_M_BIT
+	msr	sctlr_el3, x1
+	isb
+	/* Enable MMU */
+	orr	x1, x1, #SCTLR_M_BIT
+	msr	sctlr_el3, x1
+	/*
+	 * Defer ISB to avoid synchronizing twice in case we hit
+	 * the workaround SMC call which will implicitly synchronize
+	 * because of the ERET instruction.
+	 */
+
+	/*
+	 * Ensure SMC is coming from A64 state on #0
+	 * with W0 = SMCCC_ARCH_WORKAROUND_1
+	 *
+	 * This sequence evaluates as:
+	 *    (W0==SMCCC_ARCH_WORKAROUND_1) ? (ESR_EL3==SMC#0) : (NE)
+	 * allowing use of a single branch operation
+	 */
+	.if \_is_sync_exception
+	orr	w1, wzr, #SMCCC_ARCH_WORKAROUND_1
+	cmp	w0, w1
+	mrs	x0, esr_el3
+	mov_imm	w1, ESR_EL3_A64_SMC0
+	ccmp	w0, w1, #0, eq
+	/* Static predictor will predict a fall through */
+	bne	1f
+	eret
+1:
+	.endif
+
+	/*
+	 * Synchronize now to enable the MMU. This is required
+	 * to ensure the load pair below reads the data stored earlier.
+	 */
+	isb
+	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
+	.endm
+
+	/* ---------------------------------------------------------------------
+	 * Current EL with SP_EL0 : 0x0 - 0x200
+	 * ---------------------------------------------------------------------
+	 */
+vector_entry mmu_sync_exception_sp_el0
+	b	sync_exception_sp_el0
+	check_vector_size mmu_sync_exception_sp_el0
+
+vector_entry mmu_irq_sp_el0
+	b	irq_sp_el0
+	check_vector_size mmu_irq_sp_el0
+
+vector_entry mmu_fiq_sp_el0
+	b	fiq_sp_el0
+	check_vector_size mmu_fiq_sp_el0
+
+vector_entry mmu_serror_sp_el0
+	b	serror_sp_el0
+	check_vector_size mmu_serror_sp_el0
+
+	/* ---------------------------------------------------------------------
+	 * Current EL with SP_ELx: 0x200 - 0x400
+	 * ---------------------------------------------------------------------
+	 */
+vector_entry mmu_sync_exception_sp_elx
+	b	sync_exception_sp_elx
+	check_vector_size mmu_sync_exception_sp_elx
+
+vector_entry mmu_irq_sp_elx
+	b	irq_sp_elx
+	check_vector_size mmu_irq_sp_elx
+
+vector_entry mmu_fiq_sp_elx
+	b	fiq_sp_elx
+	check_vector_size mmu_fiq_sp_elx
+
+vector_entry mmu_serror_sp_elx
+	b	serror_sp_elx
+	check_vector_size mmu_serror_sp_elx
+
+	/* ---------------------------------------------------------------------
+	 * Lower EL using AArch64 : 0x400 - 0x600
+	 * ---------------------------------------------------------------------
+	 */
+vector_entry mmu_sync_exception_aarch64
+	apply_cve_2017_5715_wa _is_sync_exception=1
+	b	sync_exception_aarch64
+	check_vector_size mmu_sync_exception_aarch64
+
+vector_entry mmu_irq_aarch64
+	apply_cve_2017_5715_wa _is_sync_exception=0
+	b	irq_aarch64
+	check_vector_size mmu_irq_aarch64
+
+vector_entry mmu_fiq_aarch64
+	apply_cve_2017_5715_wa _is_sync_exception=0
+	b	fiq_aarch64
+	check_vector_size mmu_fiq_aarch64
+
+vector_entry mmu_serror_aarch64
+	apply_cve_2017_5715_wa _is_sync_exception=0
+	b	serror_aarch64
+	check_vector_size mmu_serror_aarch64
+
+	/* ---------------------------------------------------------------------
+	 * Lower EL using AArch32 : 0x600 - 0x800
+	 * ---------------------------------------------------------------------
+	 */
+vector_entry mmu_sync_exception_aarch32
+	apply_cve_2017_5715_wa _is_sync_exception=1
+	b	sync_exception_aarch32
+	check_vector_size mmu_sync_exception_aarch32
+
+vector_entry mmu_irq_aarch32
+	apply_cve_2017_5715_wa _is_sync_exception=0
+	b	irq_aarch32
+	check_vector_size mmu_irq_aarch32
+
+vector_entry mmu_fiq_aarch32
+	apply_cve_2017_5715_wa _is_sync_exception=0
+	b	fiq_aarch32
+	check_vector_size mmu_fiq_aarch32
+
+vector_entry mmu_serror_aarch32
+	apply_cve_2017_5715_wa _is_sync_exception=0
+	b	serror_aarch32
+	check_vector_size mmu_serror_aarch32
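For reference, below is a minimal sketch (not part of the patch) of how a lower-EL caller might exercise the fast path in the synchronous vector above; the label name is purely illustrative. Per the SMCCC specification, SMCCC_ARCH_WORKAROUND_1 has function ID 0x80008000, and an SMC #0 issued from AArch64 yields the ESR_EL3 value 0x5e000000 (ESR_EL3_A64_SMC0) that the macro compares against, so the call returns via the early ERET without entering the full synchronous exception handler.

	/*
	 * Illustrative caller: invoke SMCCC_ARCH_WORKAROUND_1 from a
	 * lower EL so that wa_cve_2017_5715_mmu_vbar takes the early
	 * ERET path. Label name is hypothetical.
	 */
	.global	call_arch_workaround_1
call_arch_workaround_1:
	mov	w0, #0x8000			/* low halfword of the function ID */
	movk	w0, #0x8000, lsl #16		/* w0 = 0x80008000 = SMCCC_ARCH_WORKAROUND_1 */
	smc	#0				/* A64 SMC #0: matches ESR_EL3_A64_SMC0 at EL3 */
	ret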