ARM: convert all "mov.* pc, reg" to "bx reg" for ARMv6+
ARMv6 and greater introduced a new instruction ("bx") which can be used to return from function calls. Recent CPUs perform better when the "bx lr" instruction is used rather than the "mov pc, lr" instruction, and use of this sequence is strongly recommended by the ARM architecture manual (section A.4.1.1).

We provide a new macro "ret", with all its condition-code variants, which resolves to the appropriate instruction.

Rather than doing this piecemeal, and missing some instances, change all the "mov pc" instances to use the new macro, with the exception of the "movs" instruction and the kprobes code. This allows us to detect the "mov pc, lr" case and fix it up - and also gives us the possibility of deploying this for other registers depending on the CPU selection.

Reported-by: Will Deacon <will.deacon@arm.com>
Tested-by: Stephen Warren <swarren@nvidia.com> # Tegra Jetson TK1
Tested-by: Robert Jarzmik <robert.jarzmik@free.fr> # mioa701_bootresume.S
Tested-by: Andrew Lunn <andrew@lunn.ch> # Kirkwood
Tested-by: Shawn Guo <shawn.guo@freescale.com>
Tested-by: Tony Lindgren <tony@atomide.com> # OMAPs
Tested-by: Gregory CLEMENT <gregory.clement@free-electrons.com> # Armada XP, 375, 385
Acked-by: Sekhar Nori <nsekhar@ti.com> # DaVinci
Acked-by: Christoffer Dall <christoffer.dall@linaro.org> # kvm/hyp
Acked-by: Haojian Zhuang <haojian.zhuang@gmail.com> # PXA3xx
Acked-by: Stefano Stabellini <stefano.stabellini@eu.citrix.com> # Xen
Tested-by: Uwe Kleine-König <u.kleine-koenig@pengutronix.de> # ARMv7M
Tested-by: Simon Horman <horms+renesas@verge.net.au> # Shmobile
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
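The commit message describes a "ret" macro with per-condition-code variants. As a rough sketch of the idea (not necessarily the exact in-tree definition, which lives in the kernel's assembly headers), such a macro can be generated with GNU as .irp/.macro, assuming __LINUX_ARM_ARCH__ is visible to the preprocessor:

	@ Hypothetical sketch: generate ret, reteq, retne, ... variants.
	.irp	c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
	.macro	ret\c, reg
#if __LINUX_ARM_ARCH__ < 6
	mov\c	pc, \reg			@ pre-ARMv6 builds keep the mov return
#else
	.ifeqs	"\reg", "lr"
	bx\c	\reg				@ ARMv6+: return via bx when the register is lr
	.else
	mov\c	pc, \reg			@ other registers keep mov pc, <reg>
	.endif
#endif
	.endm
	.endr

With a definition along these lines, "ret lr" in the hunks below assembles to "bx lr" on ARMv6 and later while older builds still get "mov pc, lr", and conditional forms such as "reteq lr" come from the .irp expansion.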
@@ -26,7 +26,7 @@
 #endif
 
 ENTRY(cpu_v7_proc_init)
-	mov	pc, lr
+	ret	lr
 ENDPROC(cpu_v7_proc_init)
 
 ENTRY(cpu_v7_proc_fin)
@@ -34,7 +34,7 @@ ENTRY(cpu_v7_proc_fin)
 	bic	r0, r0, #0x1000			@ ...i............
 	bic	r0, r0, #0x0006			@ .............ca.
 	mcr	p15, 0, r0, c1, c0, 0		@ disable caches
-	mov	pc, lr
+	ret	lr
 ENDPROC(cpu_v7_proc_fin)
 
 /*
@@ -71,20 +71,20 @@ ENDPROC(cpu_v7_reset)
 ENTRY(cpu_v7_do_idle)
 	dsb					@ WFI may enter a low-power mode
 	wfi
-	mov	pc, lr
+	ret	lr
 ENDPROC(cpu_v7_do_idle)
 
 ENTRY(cpu_v7_dcache_clean_area)
 	ALT_SMP(W(nop))			@ MP extensions imply L1 PTW
 	ALT_UP_B(1f)
-	mov	pc, lr
+	ret	lr
 1:	dcache_line_size r2, r3
 2:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
 	add	r0, r0, r2
 	subs	r1, r1, r2
 	bhi	2b
 	dsb	ishst
-	mov	pc, lr
+	ret	lr
 ENDPROC(cpu_v7_dcache_clean_area)
 
 	string	cpu_v7_name, "ARMv7 Processor"
@@ -163,7 +163,7 @@ ENTRY(cpu_pj4b_do_idle)
 	dsb					@ WFI may enter a low-power mode
 	wfi
 	dsb					@barrier
-	mov	pc, lr
+	ret	lr
 ENDPROC(cpu_pj4b_do_idle)
 #else
 	globl_equ	cpu_pj4b_do_idle, cpu_v7_do_idle
@@ -407,7 +407,7 @@ __v7_setup:
 	bic	r0, r0, r5			@ clear bits them
 	orr	r0, r0, r6			@ set them
 THUMB(	orr	r0, r0, #1 << 30	)	@ Thumb exceptions
-	mov	pc, lr				@ return to head.S:__ret
+	ret	lr				@ return to head.S:__ret
 ENDPROC(__v7_setup)
 
 	.align	2