author     Soby Mathew <soby.mathew@arm.com>  2018-10-12 16:40:28 +0100
committer  Soby Mathew <soby.mathew@arm.com>  2018-10-29 09:54:31 +0000
commit     f1722b693d363cc6a2b624d59f0442bf845baf62 (patch)
tree       2b8a412e48830f66641cecdb308a152db268a60f /include
parent     3bd17c0fef46a7cd894974ac4c996801458f6d09 (diff)
PIE: Use PC relative adrp/adr for symbol reference
This patch fixes up the AArch64 assembly code to use adrp/adr instructions instead of the ldr instruction when referencing symbols. This allows these assembly sequences to be position independent.

Note that references to sizes have been replaced with a calculation of the size at runtime. This is because a size is a constant value that does not depend on the execution address, and loading it with PC-relative instructions would make it relative to the execution address. We also cannot use an `ldr` instruction to load the size, as that generates a dynamic relocation entry which must *not* be fixed up, and it is difficult for a dynamic loader to differentiate which entries need to be skipped.

Change-Id: I8bf4ed5c58a9703629e5498a27624500ef40a836
Signed-off-by: Soby Mathew <soby.mathew@arm.com>
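As an illustrative sketch only (not part of the patch; `some_symbol` and the `__FOO_START__`/`__FOO_END__` linker symbols are hypothetical), the replaced and replacement patterns look like this, assuming the symbols lie within the +/-4 GiB reach of adrp:

	/* Old form: the literal pool holds an absolute address, which
	 * needs a dynamic relocation when the image runs relocated. */
	ldr	x0, =some_symbol

	/* New form: adrp yields the 4 KiB page of the symbol relative
	 * to the PC, and the add supplies the low 12 bits; no absolute
	 * address is stored, so the sequence is position independent. */
	adrp	x0, some_symbol
	add	x0, x0, :lo12:some_symbol

	/* Sizes are no longer loaded as constants; they are derived at
	 * runtime from the start/end symbols of the region. */
	adrp	x0, __FOO_START__
	add	x0, x0, :lo12:__FOO_START__
	adrp	x1, __FOO_END__
	add	x1, x1, :lo12:__FOO_END__
	sub	x1, x1, x0	/* x1 = size of the FOO region */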
Diffstat (limited to 'include')
-rw-r--r--  include/common/aarch64/asm_macros.S          8
-rw-r--r--  include/common/aarch64/el3_common_macros.S  30
-rw-r--r--  include/lib/pmf/pmf_asm_macros.S             6
3 files changed, 30 insertions(+), 14 deletions(-)
diff --git a/include/common/aarch64/asm_macros.S b/include/common/aarch64/asm_macros.S
index 9621a1c02..91416e4e4 100644
--- a/include/common/aarch64/asm_macros.S
+++ b/include/common/aarch64/asm_macros.S
@@ -105,8 +105,9 @@
* Clobber: X30, X1, X2
*/
.macro get_my_mp_stack _name, _size
- bl plat_my_core_pos
- ldr x2, =(\_name + \_size)
+ bl plat_my_core_pos
+ adrp x2, (\_name + \_size)
+ add x2, x2, :lo12:(\_name + \_size)
mov x1, #\_size
madd x0, x0, x1, x2
.endm
@@ -117,7 +118,8 @@
* Out: X0 = physical address of stack base
*/
.macro get_up_stack _name, _size
- ldr x0, =(\_name + \_size)
+ adrp x0, (\_name + \_size)
+ add x0, x0, :lo12:(\_name + \_size)
.endm
/*
diff --git a/include/common/aarch64/el3_common_macros.S b/include/common/aarch64/el3_common_macros.S
index 143c70c39..4902583b1 100644
--- a/include/common/aarch64/el3_common_macros.S
+++ b/include/common/aarch64/el3_common_macros.S
@@ -283,26 +283,38 @@
* an earlier boot loader stage.
* -------------------------------------------------------------
*/
- ldr x0, =__RW_START__
- ldr x1, =__RW_END__
+ adrp x0, __RW_START__
+ add x0, x0, :lo12:__RW_START__
+ adrp x1, __RW_END__
+ add x1, x1, :lo12:__RW_END__
sub x1, x1, x0
bl inv_dcache_range
#endif
- ldr x0, =__BSS_START__
- ldr x1, =__BSS_SIZE__
+ adrp x0, __BSS_START__
+ add x0, x0, :lo12:__BSS_START__
+ adrp x1, __BSS_END__
+ add x1, x1, :lo12:__BSS_END__
+ sub x1, x1, x0
bl zeromem
#if USE_COHERENT_MEM
- ldr x0, =__COHERENT_RAM_START__
- ldr x1, =__COHERENT_RAM_UNALIGNED_SIZE__
+ adrp x0, __COHERENT_RAM_START__
+ add x0, x0, :lo12:__COHERENT_RAM_START__
+ adrp x1, __COHERENT_RAM_END_UNALIGNED__
+ add x1, x1, :lo12: __COHERENT_RAM_END_UNALIGNED__
+ sub x1, x1, x0
bl zeromem
#endif
#if defined(IMAGE_BL1) || (defined(IMAGE_BL2) && BL2_IN_XIP_MEM)
- ldr x0, =__DATA_RAM_START__
- ldr x1, =__DATA_ROM_START__
- ldr x2, =__DATA_SIZE__
+ adrp x0, __DATA_RAM_START__
+ add x0, x0, :lo12:__DATA_RAM_START__
+ adrp x1, __DATA_ROM_START__
+ add x1, x1, :lo12:__DATA_ROM_START__
+ adrp x2, __DATA_RAM_END__
+ add x2, x2, :lo12:__DATA_RAM_END__
+ sub x2, x2, x0
bl memcpy16
#endif
.endif /* _init_c_runtime */
diff --git a/include/lib/pmf/pmf_asm_macros.S b/include/lib/pmf/pmf_asm_macros.S
index d58829eec..5e19e62f7 100644
--- a/include/lib/pmf/pmf_asm_macros.S
+++ b/include/lib/pmf/pmf_asm_macros.S
@@ -18,10 +18,12 @@
mov x9, x30
bl plat_my_core_pos
mov x30, x9
- ldr x1, =__PERCPU_TIMESTAMP_SIZE__
+ adr x2, __PMF_PERCPU_TIMESTAMP_END__
+ adr x1, __PMF_TIMESTAMP_START__
+ sub x1, x2, x1
mov x2, #(\_tid * PMF_TS_SIZE)
madd x0, x0, x1, x2
- ldr x1, =pmf_ts_mem_\_name
+ adr x1, pmf_ts_mem_\_name
add x0, x0, x1
.endm