author		Ard Biesheuvel <ardb@kernel.org>			2022-04-20 09:41:31 +0100
committer	Russell King (Oracle) <rmk+kernel@armlinux.org.uk>	2022-05-20 12:32:32 +0100
commit		508074607c7b95b24f0adf633fdf606761bb7824 (patch)
tree		6bb3ce001060faa2c5da3c985e219c10e557330c /arch/arm/include
parent		952f03316352c606bebef56ba8f9642edbb8e348 (diff)
ARM: 9195/1: entry: avoid explicit literal loads
ARMv7 has MOVW/MOVT instruction pairs to load symbol addresses into registers without having to rely on literal loads that go via the D-cache. For older cores, we now support a similar arrangement, based on PC-relative group relocations.

This means we can elide most literal loads entirely from the entry path, by switching to the ldr_va macro to emit the appropriate sequence depending on the target architecture revision.

While at it, switch to the bl_r macro for invoking the right PABT/DABT helpers instead of setting the LR register explicitly, which does not play well with cores that speculate across function returns.

Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Reviewed-by: Linus Walleij <linus.walleij@linaro.org>
Signed-off-by: Russell King (Oracle) <rmk+kernel@armlinux.org.uk>
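For illustration, a rough sketch of the two address-generation sequences the message contrasts with a classic literal load; the symbol some_sym and the register choices are invented for the example and do not appear in the patch:

	@ ARMv7+: mov_l emits a MOVW/MOVT pair, so the symbol address is
	@ built from immediates without touching the D-cache
	movw	r0, #:lower16:some_sym
	movt	r0, #:upper16:some_sym
	ldr	r0, [r0]			@ word at some_sym

	@ Pre-v7 with CONFIG_ARM_HAS_GROUP_RELOCS: a PC-relative sequence
	@ whose immediates the linker patches via group relocations
	.reloc	.Lg0, R_ARM_ALU_PC_G0_NC, some_sym
	.reloc	.Lg1, R_ARM_ALU_PC_G1_NC, some_sym
	.reloc	.Lg2, R_ARM_LDR_PC_G2, some_sym
.Lg0:	sub	r0, pc, #8
.Lg1:	sub	r0, r0, #4
.Lg2:	ldr	r0, [r0, #0]			@ word at some_sym

	@ The literal load both of these avoid: the address is first read
	@ from a nearby literal pool through the D-cache
	ldr	r0, =some_sym
	ldr	r0, [r0]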
Diffstat (limited to 'arch/arm/include')
-rw-r--r--	arch/arm/include/asm/assembler.h	18
1 file changed, 9 insertions, 9 deletions
diff --git a/arch/arm/include/asm/assembler.h b/arch/arm/include/asm/assembler.h
index 8e59d748358d..90fbe4a3f9c8 100644
--- a/arch/arm/include/asm/assembler.h
+++ b/arch/arm/include/asm/assembler.h
@@ -666,12 +666,11 @@ THUMB( orr \reg , \reg , #PSR_T_BIT )
__adldst_l str, \src, \sym, \tmp, \cond
.endm
- .macro __ldst_va, op, reg, tmp, sym, cond
+ .macro __ldst_va, op, reg, tmp, sym, cond, offset
#if __LINUX_ARM_ARCH__ >= 7 || \
!defined(CONFIG_ARM_HAS_GROUP_RELOCS) || \
(defined(MODULE) && defined(CONFIG_ARM_MODULE_PLTS))
mov_l \tmp, \sym, \cond
- \op\cond \reg, [\tmp]
#else
/*
* Avoid a literal load, by emitting a sequence of ADD/LDR instructions
@@ -683,20 +682,21 @@ THUMB( orr \reg , \reg , #PSR_T_BIT )
.reloc .L0_\@, R_ARM_ALU_PC_G0_NC, \sym
.reloc .L1_\@, R_ARM_ALU_PC_G1_NC, \sym
.reloc .L2_\@, R_ARM_LDR_PC_G2, \sym
-.L0_\@: sub\cond \tmp, pc, #8
-.L1_\@: sub\cond \tmp, \tmp, #4
-.L2_\@: \op\cond \reg, [\tmp, #0]
+.L0_\@: sub\cond \tmp, pc, #8 - \offset
+.L1_\@: sub\cond \tmp, \tmp, #4 - \offset
+.L2_\@:
#endif
+ \op\cond \reg, [\tmp, #\offset]
.endm
/*
* ldr_va - load a 32-bit word from the virtual address of \sym
*/
- .macro ldr_va, rd:req, sym:req, cond, tmp
+ .macro ldr_va, rd:req, sym:req, cond, tmp, offset=0
.ifnb \tmp
- __ldst_va ldr, \rd, \tmp, \sym, \cond
+ __ldst_va ldr, \rd, \tmp, \sym, \cond, \offset
.else
- __ldst_va ldr, \rd, \rd, \sym, \cond
+ __ldst_va ldr, \rd, \rd, \sym, \cond, \offset
.endif
.endm
@@ -704,7 +704,7 @@ THUMB( orr \reg , \reg , #PSR_T_BIT )
* str_va - store a 32-bit word to the virtual address of \sym
*/
.macro str_va, rn:req, sym:req, tmp:req, cond
- __ldst_va str, \rn, \tmp, \sym, \cond
+ __ldst_va str, \rn, \tmp, \sym, \cond, 0
.endm
/*
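As a usage note, here is a minimal, hypothetical invocation of the extended ldr_va macro; the symbol, registers and offset are invented, and the keyword-argument form follows the usual gas macro convention:

	@ Load the 32-bit word at some_sym + 4 into r0, using r1 as the
	@ scratch register that receives the symbol address
	ldr_va	r0, some_sym, tmp=r1, offset=4

	@ On ARMv7+ this expands, roughly, to:
	@	movw	r1, #:lower16:some_sym
	@	movt	r1, #:upper16:some_sym
	@	ldr	r0, [r1, #4]

Because the final LDR/STR is now shared by both configuration branches, the pre-v7 path biases its SUB immediates by -\offset so that the trailing [\tmp, #\offset] access still lands on the intended word.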