From 7343505d9661ab6481e99fa96d60f2a8447a4565 Mon Sep 17 00:00:00 2001
From: Dimitris Papastamos
Date: Tue, 2 Jan 2018 11:37:02 +0000
Subject: sp_min: Implement workaround for CVE-2017-5715

This patch introduces two workarounds for ARMv7 systems. The
workarounds need to be applied prior to any `branch` instruction in
secure world. This is achieved using a custom vector table where each
entry is an `add sp, sp, #1` instruction.

On entry to monitor mode, once the sequence of `ADD` instructions is
executed, the branch target buffer (BTB) is invalidated. The bottom
bits of `SP` are then used to decode the exception entry type.

A side effect of this change is that the exception vectors are
installed before the CPU specific reset function. This is now
consistent with how it is done on AArch64.

Note, on AArch32 systems, the exception vectors are typically tightly
integrated with the secure payload (e.g. the Trusted OS). This
workaround will need porting to each secure payload that requires it.

The patch to modify the AArch32 per-cpu vbar to the corresponding
workaround vector table according to the CPU type will be done in a
later patch.

Change-Id: I5786872497d359e496ebe0757e8017fa98f753fa
Signed-off-by: Dimitris Papastamos
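To make the mechanism described above concrete, a minimal sketch in AArch32 assembly follows. It is an illustration of the idea, not the actual sp_min implementation: the `wa_cve_2017_5715_sketch_vbar` label, the `hyp_*` handler names and the exact decode sequence are invented for this example, and it assumes TF-A's `stcopr` assembler helper and the `BPIALL` definition added to `arch.h` by this patch are in scope. The point of the `add sp, sp, #1` slots is that they neither branch nor touch memory, so nothing speculatively trained by the normal world is consumed before the predictor is invalidated, while the number of slots executed records which vector was taken.

        /*
         * Illustrative sketch only. Each vector slot bumps SP_mon by one;
         * an exception entering at slot N falls through the remaining
         * slots, so SP_mon grows by (8 - N) and its bottom three bits
         * encode the entry type. SP_mon is a multiple of 8 on entry (see
         * the smc_ctx_t padding below), so the original value is
         * recoverable.
         */
        .align  5                       /* VBAR/MVBAR need 32-byte alignment */
wa_cve_2017_5715_sketch_vbar:
        add     sp, sp, #1              /* Reset (not taken to monitor mode) */
        add     sp, sp, #1              /* Undefined instruction: SP_mon += 7 */
        add     sp, sp, #1              /* SMC:                   SP_mon += 6 */
        add     sp, sp, #1              /* Prefetch abort:        SP_mon += 5 */
        add     sp, sp, #1              /* Data abort:            SP_mon += 4 */
        add     sp, sp, #1              /* Reserved:              SP_mon += 3 */
        add     sp, sp, #1              /* IRQ:                   SP_mon += 2 */
        add     sp, sp, #1              /* FIQ:                   SP_mon += 1 */

        /*
         * No branch has executed since exception entry, so it is now safe
         * to invalidate the branch predictor. BPIALL ignores the value in
         * r0, so no general purpose register needs to be saved first.
         */
        stcopr  r0, BPIALL
        isb

        /*
         * Decode the entry type using only SP and the flags; the caller's
         * registers are still untouched. Each step undoes the offset that
         * a given entry would have added and checks whether SP_mon is
         * 8-byte aligned again.
         */
        sub     sp, sp, #6              /* SMC entry? (offset 6) */
        tst     sp, #7
        beq     hyp_smc_entrypoint      /* SP_mon already restored here */

        add     sp, sp, #4              /* IRQ entry? (offset 2) */
        tst     sp, #7
        beq     hyp_irq_entrypoint

        add     sp, sp, #1              /* FIQ entry? (offset 1) */
        tst     sp, #7
        beq     hyp_fiq_entrypoint

        bic     sp, sp, #7              /* anything else: restore SP_mon */
        b       hyp_plat_panic          /* and treat it as fatal in this sketch */

The real handlers would then save the incoming registers into the per-cpu `smc_ctx_t`, which is why the structure below is forced to an 8-byte size and alignment.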
---
 include/common/aarch32/el3_common_macros.S | 24 ++++++++++++------------
 include/lib/aarch32/arch.h                 |  4 +++-
 include/lib/aarch32/smcc_helpers.h         | 13 ++++++++++---
 3 files changed, 25 insertions(+), 16 deletions(-)
(limited to 'include')

diff --git a/include/common/aarch32/el3_common_macros.S b/include/common/aarch32/el3_common_macros.S
index 59e99f89a..74fb58296 100644
--- a/include/common/aarch32/el3_common_macros.S
+++ b/include/common/aarch32/el3_common_macros.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
@@ -14,7 +14,7 @@
 /*
  * Helper macro to initialise EL3 registers we care about.
  */
-        .macro el3_arch_init_common _exception_vectors
+        .macro el3_arch_init_common
         /* ---------------------------------------------------------------------
          * SCTLR has already been initialised - read current value before
          * modifying.
@@ -33,15 +33,6 @@
         stcopr  r0, SCTLR
         isb

-        /* ---------------------------------------------------------------------
-         * Set the exception vectors (VBAR/MVBAR).
-         * ---------------------------------------------------------------------
-         */
-        ldr     r0, =\_exception_vectors
-        stcopr  r0, VBAR
-        stcopr  r0, MVBAR
-        isb
-
         /* ---------------------------------------------------------------------
          * Initialise SCR, setting all fields rather than relying on the hw.
          *
@@ -210,6 +201,15 @@
                bxne    r0
        .endif /* _warm_boot_mailbox */

+        /* ---------------------------------------------------------------------
+         * Set the exception vectors (VBAR/MVBAR).
+         * ---------------------------------------------------------------------
+         */
+        ldr     r0, =\_exception_vectors
+        stcopr  r0, VBAR
+        stcopr  r0, MVBAR
+        isb
+
         /* ---------------------------------------------------------------------
          * It is a cold boot.
          * Perform any processor specific actions upon reset e.g. cache, TLB
@@ -218,7 +218,7 @@
          */
         bl      reset_handler

-        el3_arch_init_common \_exception_vectors
+        el3_arch_init_common

        .if     \_secondary_cold_boot
        /* -------------------------------------------------------------
diff --git a/include/lib/aarch32/arch.h b/include/lib/aarch32/arch.h
index 4d2a5fc9f..134d53468 100644
--- a/include/lib/aarch32/arch.h
+++ b/include/lib/aarch32/arch.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
@@ -426,6 +426,8 @@
 #define TLBIMVAA        p15, 0, c8, c7, 3
 #define TLBIMVAAIS      p15, 0, c8, c3, 3
 #define BPIALLIS        p15, 0, c7, c1, 6
+#define BPIALL          p15, 0, c7, c5, 6
+#define ICIALLU         p15, 0, c7, c5, 0
 #define HSCTLR          p15, 4, c1, c0, 0
 #define HCR             p15, 4, c1, c1, 0
 #define HCPTR           p15, 4, c1, c1, 2
diff --git a/include/lib/aarch32/smcc_helpers.h b/include/lib/aarch32/smcc_helpers.h
index 53f1aa4ab..ed3b722fe 100644
--- a/include/lib/aarch32/smcc_helpers.h
+++ b/include/lib/aarch32/smcc_helpers.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
@@ -22,7 +22,7 @@
 #define SMC_CTX_LR_MON          0x80
 #define SMC_CTX_SCR             0x84
 #define SMC_CTX_PMCR            0x88
-#define SMC_CTX_SIZE            0x8C
+#define SMC_CTX_SIZE            0x90

 #ifndef __ASSEMBLY__
 #include
@@ -75,7 +75,13 @@ typedef struct smc_ctx {
        u_register_t lr_mon;
        u_register_t scr;
        u_register_t pmcr;
-} smc_ctx_t;
+        /*
+         * The workaround for CVE-2017-5715 requires storing information in
+         * the bottom 3 bits of the stack pointer. Add a padding field to
+         * force the size of the struct to be a multiple of 8.
+         */
+        u_register_t pad;
+} smc_ctx_t __aligned(8);

 /*
  * Compile time assertions related to the 'smc_context' structure to
@@ -99,6 +105,7 @@ CASSERT(SMC_CTX_LR_MON == __builtin_offsetof(smc_ctx_t, lr_mon), \
 CASSERT(SMC_CTX_SPSR_MON == __builtin_offsetof(smc_ctx_t, spsr_mon), \
        assert_smc_ctx_spsr_mon_offset_mismatch);

+CASSERT((sizeof(smc_ctx_t) & 0x7) == 0, assert_smc_ctx_not_aligned);
 CASSERT(SMC_CTX_SIZE == sizeof(smc_ctx_t), assert_smc_ctx_size_mismatch);

 /* Convenience macros to return from SMC handler */
--
cgit v1.2.3

From e4b34efa18f1cac10aa8541bc0a1dbab49886009 Mon Sep 17 00:00:00 2001
From: Dimitris Papastamos
Date: Wed, 3 Jan 2018 10:48:59 +0000
Subject: Workaround for CVE-2017-5715 for Cortex A9, A15 and A17

A per-cpu vbar is installed that implements the workaround by
invalidating the branch target buffer (BTB) directly in the case of
A9 and A17 and indirectly by invalidating the icache in the case of
A15.

For Cortex A57 and A72 there is currently no workaround implemented
when EL3 is in AArch32 mode so report it as missing.

For other vulnerable CPUs (e.g. Cortex A73 and Cortex A75), there are
no changes since there is currently no upstream AArch32 EL3 support
for these CPUs.

Change-Id: Ib42c6ef0b3c9ff2878a9e53839de497ff736258f
Signed-off-by: Dimitris Papastamos
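To illustrate the difference between the two variants: on Cortex-A9 and Cortex-A17 the workaround vector can invalidate the BTB directly with `BPIALL`, while on Cortex-A15 the commit relies on an instruction cache invalidate, with the `CORTEX_A15_ACTLR_INV_BTB_BIT` added below making `ICIALLU` invalidate the BTB as well. The fragment below is a sketch, not the actual cpu library code: the label is hypothetical, and it assumes TF-A's `ldcopr`/`stcopr` helpers and the `ACTLR` definition from `arch.h` are available.

        /*
         * Sketch of a Cortex-A15 reset-time hook (hypothetical label):
         * set ACTLR.INV_BTB so that ICIALLU also invalidates the BTB.
         */
hyp_cortex_a15_reset_sketch:
        ldcopr  r0, ACTLR
        orr     r0, r0, #CORTEX_A15_ACTLR_INV_BTB_BIT
        stcopr  r0, ACTLR
        isb
        bx      lr

With that bit set, the Cortex-A15 flavour of the workaround vector would replace the `stcopr r0, BPIALL` of the earlier sketch with `stcopr r0, ICIALLU` followed by `isb`; the surrounding `ADD`-and-decode sequence stays the same.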
---
 include/lib/cpus/aarch32/cortex_a15.h | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
(limited to 'include')

diff --git a/include/lib/cpus/aarch32/cortex_a15.h b/include/lib/cpus/aarch32/cortex_a15.h
index 905c139da..0f01a4336 100644
--- a/include/lib/cpus/aarch32/cortex_a15.h
+++ b/include/lib/cpus/aarch32/cortex_a15.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
@@ -15,6 +15,7 @@
 /*******************************************************************************
 * CPU Auxiliary Control register specific definitions.
 ******************************************************************************/
+#define CORTEX_A15_ACTLR_INV_BTB_BIT    (1 << 0)
 #define CORTEX_A15_ACTLR_SMP_BIT        (1 << 6)

 #endif /* __CORTEX_A15_H__ */
--
cgit v1.2.3
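Finally, the "per-cpu vbar" mentioned in the second commit amounts to pointing VBAR/MVBAR at whichever workaround vector table matches the CPU, in the same way the first patch programs the default vectors. A minimal sketch, reusing the hypothetical `wa_cve_2017_5715_sketch_vbar` symbol from the first example; how the right table is selected per CPU type is not shown here.

        /*
         * Hypothetical hook run early on each CPU: route monitor-mode
         * exceptions through the workaround vector table.
         */
        ldr     r0, =wa_cve_2017_5715_sketch_vbar
        stcopr  r0, VBAR
        stcopr  r0, MVBAR
        isb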