author     Manish V Badarkhe <Manish.Badarkhe@arm.com>  2020-07-23 12:43:25 +0100
committer  Manish V Badarkhe <Manish.Badarkhe@arm.com>  2020-08-18 10:49:27 +0100
commit     3b8456bd1c9fd2303483f0675786e3fbda81a0af (patch)
tree       f9667f04051d19ce5cc4bd6585657a3d753e96fe /bl31/aarch64/runtime_exceptions.S
parent     cb55615c506d6bff3f1a9223182e190abbbf6fc5 (diff)
runtime_exceptions: Update AT speculative workaround
As per the latest mailing list discussion [1], the AT speculative workaround
implementation is updated to disable the page table walk for the lower ELs
(EL1 or EL0) immediately after context switching to EL3 from a lower EL.

The previous implementation of the AT speculative workaround is available
here: 45aecff00

The AT speculative workaround is updated as below:
1. Avoid saving and restoring the SCTLR and TCR registers for EL1 in the
   context save and restore routines respectively.
2. On EL3 entry, save the SCTLR and TCR registers for EL1.
3. On EL3 entry, update the EL1 system registers to disable stage 1 page
   table walks for the lower ELs (EL1 and EL0) and enable the EL1 MMU.
4. On EL3 exit, restore the SCTLR and TCR registers for EL1 which were
   saved in step 2.

[1]: https://lists.trustedfirmware.org/pipermail/tf-a/2020-July/000586.html

Change-Id: Iee8de16f81dc970a8f492726f2ddd57e7bd9ffb5
Signed-off-by: Manish V Badarkhe <Manish.Badarkhe@arm.com>
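For illustration only, a minimal AArch64 sketch of steps 2 and 3 on the EL3
entry path could look like the following. This is not the body of the
apply_at_speculative_wa macro from el3_common_macros.S; the context offsets
(CTX_EL1_SYSREGS_OFFSET, CTX_SCTLR_EL1, CTX_TCR_EL1) and the bit masks
(TCR_EPD0_BIT, TCR_EPD1_BIT, SCTLR_M_BIT) are assumed to come from the TF-A
context and arch headers.

	/* Hedged sketch, not the actual TF-A macro body */
	mrs	x9, sctlr_el1
	str	x9, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]	/* step 2: save SCTLR_EL1 */
	mrs	x10, tcr_el1
	str	x10, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]	/* step 2: save TCR_EL1 */
	orr	x10, x10, #TCR_EPD0_BIT		/* step 3: disable stage 1 walks via TTBR0_EL1 */
	orr	x10, x10, #TCR_EPD1_BIT		/* step 3: disable stage 1 walks via TTBR1_EL1 */
	msr	tcr_el1, x10
	isb					/* EPDx update takes effect before the MMU bit */
	orr	x9, x9, #SCTLR_M_BIT		/* step 3: keep the EL1 MMU enabled */
	msr	sctlr_el1, x9
	isb

The ISB between the two system register writes matters: the EPDx bits must be
synchronized before the EL1 MMU setting is relied upon, so that no speculative
stage 1 walk can start under the old translation controls.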
Diffstat (limited to 'bl31/aarch64/runtime_exceptions.S')
-rw-r--r--  bl31/aarch64/runtime_exceptions.S | 11
1 file changed, 11 insertions(+), 0 deletions(-)
diff --git a/bl31/aarch64/runtime_exceptions.S b/bl31/aarch64/runtime_exceptions.S
index 5b3738868..bfe13f312 100644
--- a/bl31/aarch64/runtime_exceptions.S
+++ b/bl31/aarch64/runtime_exceptions.S
@@ -12,6 +12,7 @@
#include <bl31/interrupt_mgmt.h>
#include <common/runtime_svc.h>
#include <context.h>
+#include <el3_common_macros.S>
#include <lib/el3_runtime/cpu_data.h>
#include <lib/smccc.h>
@@ -285,21 +286,25 @@ vector_entry sync_exception_aarch64
* to a valid cpu context where the general purpose and system register
* state can be saved.
*/
+ apply_at_speculative_wa
check_and_unmask_ea
handle_sync_exception
end_vector_entry sync_exception_aarch64
vector_entry irq_aarch64
+ apply_at_speculative_wa
check_and_unmask_ea
handle_interrupt_exception irq_aarch64
end_vector_entry irq_aarch64
vector_entry fiq_aarch64
+ apply_at_speculative_wa
check_and_unmask_ea
handle_interrupt_exception fiq_aarch64
end_vector_entry fiq_aarch64
vector_entry serror_aarch64
+ apply_at_speculative_wa
msr daifclr, #DAIF_ABT_BIT
b enter_lower_el_async_ea
end_vector_entry serror_aarch64
@@ -315,21 +320,25 @@ vector_entry sync_exception_aarch32
* to a valid cpu context where the general purpose and system register
* state can be saved.
*/
+ apply_at_speculative_wa
check_and_unmask_ea
handle_sync_exception
end_vector_entry sync_exception_aarch32
vector_entry irq_aarch32
+ apply_at_speculative_wa
check_and_unmask_ea
handle_interrupt_exception irq_aarch32
end_vector_entry irq_aarch32
vector_entry fiq_aarch32
+ apply_at_speculative_wa
check_and_unmask_ea
handle_interrupt_exception fiq_aarch32
end_vector_entry fiq_aarch32
vector_entry serror_aarch32
+ apply_at_speculative_wa
msr daifclr, #DAIF_ABT_BIT
b enter_lower_el_async_ea
end_vector_entry serror_aarch32
@@ -455,6 +464,8 @@ smc_unknown:
b el3_exit
smc_prohibited:
+ restore_ptw_el1_sys_regs
+ ldp x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
ldr x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
mov x0, #SMC_UNK
exception_return
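On the EL3 exit path (step 4), the restore_ptw_el1_sys_regs macro used in the
hunk above undoes the entry-time change. A hedged sketch of that restore
ordering, again using assumed context offsets rather than the real macro body,
might be:

	/* Hedged sketch of the exit-path restore, not the TF-A macro body */
	isb						/* complete earlier sysreg writes first */
	ldr	x28, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	msr	sctlr_el1, x28				/* restore SCTLR_EL1 before TCR_EL1 */
	isb
	ldr	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]
	msr	tcr_el1, x29				/* stage 1 walks re-enabled last */
	isb

Because such a restore needs scratch registers (x28/x29 in this sketch), the
smc_prohibited hunk reloads the saved general purpose x28/x29 from the context
immediately after restore_ptw_el1_sys_regs, before the final exception_return.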