author     Etienne Carriere <etienne.carriere@linaro.org>    2017-11-08 14:38:33 +0100
committer  Etienne Carriere <etienne.carriere@linaro.org>    2017-11-08 14:38:33 +0100
commit     64cc6e91e819ccbf5fe3bf8a5c177b8fa8012d8c (patch)
tree       6cc714edf449c3e44fde7840598d30089edd9f57 /include
parent     51b992ecec92b9dcca410a2c3716f45daca5afd1 (diff)
ARMv7 may not support Virtualization Extensions
The ARMv7-A Virtualization Extensions bring new instructions and resources that are otherwise only available in later architectures. Reference: ARM ARM Issue C.c [DDI0406C_C].

ERET and the extended MSR/MRS instructions are covered in [DDI0406C_C] by the ID_PFR1 description of bits[15:12] (Virtualization Extensions): a value of 0b0001 implies implementation of the HVC, ERET, MRS (Banked register), and MSR (Banked register) instructions. The ID_ISARs do not identify whether these instructions are implemented.

UDIV/SDIV were introduced together with the Virtualization Extensions, even though they are not strictly related to virtualization.

If an ARMv7-based platform does not set ARM_CORTEX_Ax=yes, the platform shall define ARMV7_SUPPORTS_VIRTUALIZATION to enable the resources that depend on the Virtualization Extensions.

Signed-off-by: Etienne Carriere <etienne.carriere@linaro.org>
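Since the commit message leans on the ID_PFR1 field, here is a stand-alone sketch (not part of this patch; check_virt_ext is a made-up label) of how that field can be read at run time. A result of 0b0001 or greater means HVC, ERET and the banked MRS/MSR forms are implemented:

	.text
	.syntax unified

	/* Return the ID_PFR1 Virtualization Extensions field (bits [15:12]) in r0. */
check_virt_ext:
	mrc	p15, 0, r0, c0, c1, 1	/* r0 = ID_PFR1 */
	ubfx	r0, r0, #12, #4		/* isolate the Virtualization Extensions field */
	bx	lr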
Diffstat (limited to 'include')
-rw-r--r--  include/common/aarch32/asm_macros.S  10
-rw-r--r--  include/lib/aarch32/smcc_macros.S    80
2 files changed, 89 insertions(+), 1 deletion(-)
diff --git a/include/common/aarch32/asm_macros.S b/include/common/aarch32/asm_macros.S
index 0d1a37d1e..74322228e 100644
--- a/include/common/aarch32/asm_macros.S
+++ b/include/common/aarch32/asm_macros.S
@@ -79,6 +79,16 @@
ldr r0, =(\_name + \_size)
.endm
+#if ARM_ARCH_MAJOR == 7 && !defined(ARMV7_SUPPORTS_VIRTUALIZATION)
+ /*
+ * ARMv7 cores without Virtualization extension do not support the
+ * eret instruction.
+ */
+ .macro eret
+ movs pc, lr
+ .endm
+#endif
+
#if (ARM_ARCH_MAJOR == 7)
/* ARMv7 does not support stl instruction */
.macro stl _reg, _write_lock
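As a usage illustration (not part of the patch), callers keep writing eret; when ARM_ARCH_MAJOR is 7 and ARMV7_SUPPORTS_VIRTUALIZATION is not defined, the macro above makes that line assemble to the classic exception return, which copies SPSR_mon into CPSR while branching to lr. A minimal sketch, assuming lr and SPSR_mon already hold the desired return state and dummy_monitor_return is a made-up label:

dummy_monitor_return:
	eret		/* assembles to "movs pc, lr" when the Virtualization
			 * Extensions are not available */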
diff --git a/include/lib/aarch32/smcc_macros.S b/include/lib/aarch32/smcc_macros.S
index cf26175d6..93f211f7a 100644
--- a/include/lib/aarch32/smcc_macros.S
+++ b/include/lib/aarch32/smcc_macros.S
@@ -22,6 +22,44 @@
mov r0, sp
add r0, r0, #SMC_CTX_SP_USR
+#if ARM_ARCH_MAJOR == 7 && !defined(ARMV7_SUPPORTS_VIRTUALIZATION)
+ /* Must be in secure state to restore Monitor mode */
+ ldcopr r4, SCR
+ bic r2, r4, #SCR_NS_BIT
+ stcopr r2, SCR
+ isb
+
+ cps #MODE32_sys
+ stm r0!, {sp, lr}
+
+ cps #MODE32_irq
+ mrs r2, spsr
+ stm r0!, {r2, sp, lr}
+
+ cps #MODE32_fiq
+ mrs r2, spsr
+ stm r0!, {r2, sp, lr}
+
+ cps #MODE32_svc
+ mrs r2, spsr
+ stm r0!, {r2, sp, lr}
+
+ cps #MODE32_abt
+ mrs r2, spsr
+ stm r0!, {r2, sp, lr}
+
+ cps #MODE32_und
+ mrs r2, spsr
+ stm r0!, {r2, sp, lr}
+
+ /* lr_mon is already saved by caller */
+ cps #MODE32_mon
+ mrs r2, spsr
+ stm r0!, {r2}
+
+ stcopr r4, SCR
+ isb
+#else
/* Save the banked registers including the current SPSR and LR */
mrs r4, sp_usr
mrs r5, lr_usr
@@ -44,9 +82,10 @@
mrs r11, lr_und
mrs r12, spsr
stm r0!, {r4-r12}
-
/* lr_mon is already saved by caller */
+
ldcopr r4, SCR
+#endif
str r4, [sp, #SMC_CTX_SCR]
ldcopr r4, PMCR
str r4, [sp, #SMC_CTX_PMCR]
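To spell out what the #if above selects, the same single step (capturing the System/User mode SP and LR from Monitor mode) is shown both ways below; a sketch for illustration only, reusing the MODE32_* constants already referenced in this file:

	/* With the Virtualization Extensions: the banked forms read another
	 * mode's registers without leaving Monitor mode. */
	mrs	r4, sp_usr
	mrs	r5, lr_usr

	/* Without them: with SCR.NS cleared (as done above) the core stays
	 * Secure across the mode switches, so it can enter System mode with
	 * cps, read SP and LR directly, and legally return to Monitor mode. */
	cps	#MODE32_sys
	mov	r4, sp
	mov	r5, lr
	cps	#MODE32_mon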
@@ -82,6 +121,44 @@
/* Restore the banked registers including the current SPSR */
add r1, r0, #SMC_CTX_SP_USR
+
+#if ARM_ARCH_MAJOR == 7 && !defined(ARMV7_SUPPORTS_VIRTUALIZATION)
+ /* Must be in secure state to restore Monitor mode */
+ ldcopr r4, SCR
+ bic r2, r4, #SCR_NS_BIT
+ stcopr r2, SCR
+ isb
+
+ cps #MODE32_sys
+ ldm r1!, {sp, lr}
+
+ cps #MODE32_irq
+ ldm r1!, {r2, sp, lr}
+ msr spsr_fsxc, r2
+
+ cps #MODE32_fiq
+ ldm r1!, {r2, sp, lr}
+ msr spsr_fsxc, r2
+
+ cps #MODE32_svc
+ ldm r1!, {r2, sp, lr}
+ msr spsr_fsxc, r2
+
+ cps #MODE32_abt
+ ldm r1!, {r2, sp, lr}
+ msr spsr_fsxc, r2
+
+ cps #MODE32_und
+ ldm r1!, {r2, sp, lr}
+ msr spsr_fsxc, r2
+
+ cps #MODE32_mon
+ ldm r1!, {r2}
+ msr spsr_fsxc, r2
+
+ stcopr r4, SCR
+ isb
+#else
ldm r1!, {r4-r12}
msr sp_usr, r4
msr lr_usr, r5
@@ -109,6 +186,7 @@
* f->[31:24] and c->[7:0] bits of SPSR.
*/
msr spsr_fsxc, r12
+#endif
/* Restore the LR */
ldr lr, [r0, #SMC_CTX_LR_MON]
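For completeness, the _fsxc suffix used throughout the restore path selects all four byte lanes of the status register, as the comment above spells out for the f and c fields. A hypothetical example of composing such a value by hand, using raw architectural bit positions and the MODE32_svc constant already used in this file (illustration only, not part of the patch):

	/* Build an SPSR value that returns to Supervisor mode with IRQ and
	 * FIQ masked, then write every field of the current mode's SPSR
	 * (SPSR_mon when executed in Monitor mode). */
	mov	r2, #MODE32_svc		/* M[4:0] = 0b10011: Supervisor mode */
	orr	r2, r2, #(1 << 7)	/* I bit: IRQs masked after the return */
	orr	r2, r2, #(1 << 6)	/* F bit: FIQs masked after the return */
	msr	spsr_fsxc, r2		/* f, s, x and c bytes of the SPSR */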