PIE: Use PC relative adrp/adr for symbol reference
authorSoby Mathew <soby.mathew@arm.com>
Fri, 12 Oct 2018 15:40:28 +0000 (16:40 +0100)
committerSoby Mathew <soby.mathew@arm.com>
Mon, 29 Oct 2018 09:54:31 +0000 (09:54 +0000)
This patch fixes up the AArch64 assembly code to use
adrp/adr instructions instead of the ldr instruction when
referencing symbols. This allows these assembly sequences
to be Position Independent. Note that references to sizes
have been replaced with a calculation of the size at
runtime. This is because a size is a constant value that
does not depend on the execution address, so loading it
with a PC relative instruction would wrongly make it
relative to the execution address. We also cannot use an
`ldr` instruction to load the size, as that generates a
dynamic relocation entry which must *not* be fixed up, and
it is difficult for a dynamic loader to tell which entries
need to be skipped.
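As an illustrative sketch (not part of the diff below;
__FOO_START__ and __FOO_END__ are placeholder linker
symbols), the pattern being applied replaces an absolute
literal-pool load:

        ldr     x0, =__FOO_START__
        ldr     x1, =__FOO_SIZE__

with a PC relative address and a size computed at runtime
from the end symbol:

        adrp    x0, __FOO_START__               /* page address, PC relative */
        add     x0, x0, :lo12:__FOO_START__     /* low 12 bits of the symbol */
        adrp    x1, __FOO_END__
        add     x1, x1, :lo12:__FOO_END__
        sub     x1, x1, x0                      /* size derived at runtime */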

Change-Id: I8bf4ed5c58a9703629e5498a27624500ef40a836
Signed-off-by: Soby Mathew <soby.mathew@arm.com>
bl2/aarch64/bl2_entrypoint.S
include/common/aarch64/asm_macros.S
include/common/aarch64/el3_common_macros.S
include/lib/pmf/pmf_asm_macros.S
lib/romlib/init.s
lib/xlat_tables_v2/aarch64/enable_mmu.S

index bc8cbfd653601935e92ff47672a6ccad86ffb2e8..30a5c599a676aedf236602f2c05b5df239517ed4 100644 (file)
@@ -70,13 +70,19 @@ func bl2_entrypoint
         *   - the coherent memory section.
         * ---------------------------------------------
         */
-       ldr     x0, =__BSS_START__
-       ldr     x1, =__BSS_SIZE__
+       adrp    x0, __BSS_START__
+       add     x0, x0, :lo12:__BSS_START__
+       adrp    x1, __BSS_END__
+       add     x1, x1, :lo12:__BSS_END__
+       sub     x1, x1, x0
        bl      zeromem
 
 #if USE_COHERENT_MEM
-       ldr     x0, =__COHERENT_RAM_START__
-       ldr     x1, =__COHERENT_RAM_UNALIGNED_SIZE__
+       adrp    x0, __COHERENT_RAM_START__
+       add     x0, x0, :lo12:__COHERENT_RAM_START__
+       adrp    x1, __COHERENT_RAM_END_UNALIGNED__
+       add     x1, x1, :lo12:__COHERENT_RAM_END_UNALIGNED__
+       sub     x1, x1, x0
        bl      zeromem
 #endif
 
index 9621a1c02c8215dcf4d974a7fd044c77e9060a03..91416e4e4f8d6322616d814fc1511befb7bdc973 100644 (file)
         * Clobber: X30, X1, X2
         */
        .macro get_my_mp_stack _name, _size
-       bl  plat_my_core_pos
-       ldr x2, =(\_name + \_size)
+       bl      plat_my_core_pos
+       adrp    x2, (\_name + \_size)
+       add     x2, x2, :lo12:(\_name + \_size)
        mov x1, #\_size
        madd x0, x0, x1, x2
        .endm
         * Out: X0 = physical address of stack base
         */
        .macro get_up_stack _name, _size
-       ldr x0, =(\_name + \_size)
+       adrp    x0, (\_name + \_size)
+       add     x0, x0, :lo12:(\_name + \_size)
        .endm
 
        /*
index 143c70c3912289e39f204a99d7a86a00e7527180..4902583b15b4931051630fd147d5ac1ac9c163b6 100644 (file)
                 * an earlier boot loader stage.
                 * -------------------------------------------------------------
                 */
-               ldr     x0, =__RW_START__
-               ldr     x1, =__RW_END__
+               adrp    x0, __RW_START__
+               add     x0, x0, :lo12:__RW_START__
+               adrp    x1, __RW_END__
+               add     x1, x1, :lo12:__RW_END__
                sub     x1, x1, x0
                bl      inv_dcache_range
 #endif
+               adrp    x0, __BSS_START__
+               add     x0, x0, :lo12:__BSS_START__
 
-               ldr     x0, =__BSS_START__
-               ldr     x1, =__BSS_SIZE__
+               adrp    x1, __BSS_END__
+               add     x1, x1, :lo12:__BSS_END__
+               sub     x1, x1, x0
                bl      zeromem
 
 #if USE_COHERENT_MEM
-               ldr     x0, =__COHERENT_RAM_START__
-               ldr     x1, =__COHERENT_RAM_UNALIGNED_SIZE__
+               adrp    x0, __COHERENT_RAM_START__
+               add     x0, x0, :lo12:__COHERENT_RAM_START__
+               adrp    x1, __COHERENT_RAM_END_UNALIGNED__
+               add     x1, x1, :lo12: __COHERENT_RAM_END_UNALIGNED__
+               sub     x1, x1, x0
                bl      zeromem
 #endif
 
 #if defined(IMAGE_BL1) || (defined(IMAGE_BL2) && BL2_IN_XIP_MEM)
-               ldr     x0, =__DATA_RAM_START__
-               ldr     x1, =__DATA_ROM_START__
-               ldr     x2, =__DATA_SIZE__
+               adrp    x0, __DATA_RAM_START__
+               add     x0, x0, :lo12:__DATA_RAM_START__
+               adrp    x1, __DATA_ROM_START__
+               add     x1, x1, :lo12:__DATA_ROM_START__
+               adrp    x2, __DATA_RAM_END__
+               add     x2, x2, :lo12:__DATA_RAM_END__
+               sub     x2, x2, x0
                bl      memcpy16
 #endif
        .endif /* _init_c_runtime */
index d58829eeccca19c7f0374948b11093006e9306e0..5e19e62f708c3fb242b3b93bddcc540aec89a500 100644 (file)
        mov     x9, x30
        bl      plat_my_core_pos
        mov     x30, x9
-       ldr     x1, =__PERCPU_TIMESTAMP_SIZE__
+       adr     x2, __PMF_PERCPU_TIMESTAMP_END__
+       adr     x1, __PMF_TIMESTAMP_START__
+       sub     x1, x2, x1
        mov     x2, #(\_tid * PMF_TS_SIZE)
        madd    x0, x0, x1, x2
-       ldr     x1, =pmf_ts_mem_\_name
+       adr     x1, pmf_ts_mem_\_name
        add     x0, x0, x1
        .endm
 
index 5cf2aca045b86f789ffda9d3f446dc8bcefb3800..7d97e4d1de5820ef8c31c54ab84109a252fa8883 100644 (file)
@@ -5,7 +5,7 @@
  */
 
        .globl  rom_lib_init
-       .extern __DATA_RAM_START__, __DATA_ROM_START__, __DATA_SIZE__
+       .extern __DATA_RAM_START__, __DATA_ROM_START__, __DATA_RAM_END__
        .extern memset, memcpy
 
 rom_lib_init:
@@ -16,13 +16,19 @@ rom_lib_init:
 
 1:     stp     x29, x30, [sp, #-16]!
        adrp    x0, __DATA_RAM_START__
-       ldr     x1,= __DATA_ROM_START__
-       ldr     x2, =__DATA_SIZE__
+       adrp    x1, __DATA_ROM_START__
+       add     x1, x1, :lo12:__DATA_ROM_START__
+       adrp    x2, __DATA_RAM_END__
+       add     x2, x2, :lo12:__DATA_RAM_END__
+       sub     x2, x2, x0
        bl      memcpy
 
-       ldr     x0, =__BSS_START__
+       adrp    x0, __BSS_START__
+       add     x0, x0, :lo12:__BSS_START__
        mov     x1, #0
-       ldr     x2, =__BSS_SIZE__
+       adrp    x2, __BSS_END__
+       add     x2, x2, :lo12:__BSS_END__
+       sub     x2, x2, x0
        bl      memset
        ldp     x29, x30, [sp], #16
 
index 21717d28a847ab04367554ee12cbd3824a4fc0c4..504c03c152674179afee3e2bb5c7b330a59e1b29 100644 (file)
@@ -45,7 +45,8 @@
                tlbi_invalidate_all \el
 
                mov     x7, x0
-               ldr     x0, =mmu_cfg_params
+               adrp    x0, mmu_cfg_params
+               add     x0, x0, :lo12:mmu_cfg_params
 
                /* MAIR */
                ldr     x1, [x0, #(MMU_CFG_MAIR << 3)]