ARM: convert all "mov.* pc, reg" to "bx reg" for ARMv6+
ARMv6 and later introduced a new instruction ("bx") which can be used
to return from function calls. Recent CPUs perform better when the
"bx lr" instruction is used rather than the "mov pc, lr" instruction,
and the ARM architecture manual strongly recommends this sequence
(section A.4.1.1).

We provide a new macro "ret", with variants for each condition code,
which resolves to the appropriate instruction.

Rather than doing this piecemeal and missing some instances, change
all the "mov pc" instances to use the new macro, with the exception of
the "movs" instruction and the kprobes code. This allows us to detect
the "mov pc, lr" case and fix it up - and also gives us the possibility
of deploying this for other registers depending on the CPU selection.
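
An illustrative before/after (not part of the patch; "do_thing" is a
made-up symbol), showing what the conversion looks like for a typical
leaf routine:

	ENTRY(do_thing)			@ before
		add	r0, r0, r1
		mov	pc, lr		@ returns, but gives the CPU no
					@ "function return" hint
	ENDPROC(do_thing)

	ENTRY(do_thing)			@ after
		add	r0, r0, r1
		ret	lr		@ assembles to "bx lr" on ARMv6+,
					@ "mov pc, lr" on older CPUs
	ENDPROC(do_thing)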
Reported-by: Will Deacon <will.deacon@arm.com>
Tested-by: Stephen Warren <swarren@nvidia.com> # Tegra Jetson TK1
Tested-by: Robert Jarzmik <robert.jarzmik@free.fr> # mioa701_bootresume.S
Tested-by: Andrew Lunn <andrew@lunn.ch> # Kirkwood
Tested-by: Shawn Guo <shawn.guo@freescale.com>
Tested-by: Tony Lindgren <tony@atomide.com> # OMAPs
Tested-by: Gregory CLEMENT <gregory.clement@free-electrons.com> # Armada XP, 375, 385
Acked-by: Sekhar Nori <nsekhar@ti.com> # DaVinci
Acked-by: Christoffer Dall <christoffer.dall@linaro.org> # kvm/hyp
Acked-by: Haojian Zhuang <haojian.zhuang@gmail.com> # PXA3xx
Acked-by: Stefano Stabellini <stefano.stabellini@eu.citrix.com> # Xen
Tested-by: Uwe Kleine-König <u.kleine-koenig@pengutronix.de> # ARMv7M
Tested-by: Simon Horman <horms+renesas@verge.net.au> # Shmobile
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
@@ -35,6 +35,7 @@
 @ that is being targetted.

 #include <linux/linkage.h>
+#include <asm/assembler.h>

 .text
@@ -648,7 +649,7 @@ _armv4_AES_set_encrypt_key:

 .Ldone:	mov	r0,#0
 	ldmia	sp!,{r4-r12,lr}
-.Labrt:	mov	pc,lr
+.Labrt:	ret	lr
 ENDPROC(private_AES_set_encrypt_key)

 .align	5
@@ -427,4 +427,25 @@ THUMB( orr \reg , \reg , #PSR_T_BIT )
 #endif
 .endm

+.irp	c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
+.macro	ret\c, reg
+#if __LINUX_ARM_ARCH__ < 6
+	mov\c	pc, \reg
+#else
+	.ifeqs	"\reg", "lr"
+	bx\c	\reg
+	.else
+	mov\c	pc, \reg
+	.endif
+#endif
+.endm
+.endr
+
+.macro	ret.w, reg
+	ret	\reg
+#ifdef CONFIG_THUMB2_KERNEL
+	nop
+#endif
+.endm
+
 #endif /* __ASM_ASSEMBLER_H__ */
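
(For illustration, not part of the patch: assuming __LINUX_ARM_ARCH__ >= 6
and an ARM-mode build, the new macros expand roughly as follows.)

	ret	lr		@ -> bx lr        (lr: usable by the return predictor)
	reteq	r8		@ -> moveq pc, r8 (non-lr register: plain mov)
	ret.w	lr		@ -> bx lr        (already 32 bits wide in ARM mode)

(On pre-ARMv6 builds every variant falls back to "mov\c pc, \reg".)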
@@ -35,5 +35,5 @@
 \symbol_name:
 	mov	r8, lr
 	arch_irq_handler_default
-	mov	pc, r8
+	ret	r8
 .endm
@@ -90,7 +90,7 @@ ENTRY(printascii)
 	ldrneb	r1, [r0], #1
 	teqne	r1, #0
 	bne	1b
-	mov	pc, lr
+	ret	lr
 ENDPROC(printascii)

 ENTRY(printch)
@@ -105,7 +105,7 @@ ENTRY(debug_ll_addr)
 	addruart r2, r3, ip
 	str	r2, [r0]
 	str	r3, [r1]
-	mov	pc, lr
+	ret	lr
 ENDPROC(debug_ll_addr)
 #endif

@@ -116,7 +116,7 @@ ENTRY(printascii)
 	mov	r0, #0x04	@ SYS_WRITE0
 ARM(	svc	#0x123456	)
 THUMB(	svc	#0xab		)
-	mov	pc, lr
+	ret	lr
 ENDPROC(printascii)

 ENTRY(printch)
@@ -125,14 +125,14 @@ ENTRY(printch)
 	mov	r0, #0x03	@ SYS_WRITEC
 ARM(	svc	#0x123456	)
 THUMB(	svc	#0xab		)
-	mov	pc, lr
+	ret	lr
 ENDPROC(printch)

 ENTRY(debug_ll_addr)
 	mov	r2, #0
 	str	r2, [r0]
 	str	r2, [r1]
-	mov	pc, lr
+	ret	lr
 ENDPROC(debug_ll_addr)

 #endif
@@ -224,7 +224,7 @@ svc_preempt:
 1:	bl	preempt_schedule_irq		@ irq en/disable is done inside
 	ldr	r0, [tsk, #TI_FLAGS]		@ get new tasks TI_FLAGS
 	tst	r0, #_TIF_NEED_RESCHED
-	moveq	pc, r8				@ go again
+	reteq	r8				@ go again
 	b	1b
 #endif

@@ -490,7 +490,7 @@ ENDPROC(__und_usr)
 .pushsection .fixup, "ax"
 	.align	2
 4:	str	r4, [sp, #S_PC]			@ retry current instruction
-	mov	pc, r9
+	ret	r9
 .popsection
 .pushsection __ex_table,"a"
 	.long	1b, 4b
@@ -552,7 +552,7 @@ call_fpe:
 #endif
 	tst	r0, #0x08000000			@ only CDP/CPRT/LDC/STC have bit 27
 	tstne	r0, #0x04000000			@ bit 26 set on both ARM and Thumb-2
-	moveq	pc, lr
+	reteq	lr
 	and	r8, r0, #0x00000f00		@ mask out CP number
 THUMB(	lsr	r8, r8, #8		)
 	mov	r7, #1
@@ -571,33 +571,33 @@ call_fpe:
 THUMB(	add	pc, r8			)
 	nop

-	movw_pc	lr				@ CP#0
+	ret.w	lr				@ CP#0
 	W(b)	do_fpe				@ CP#1 (FPE)
 	W(b)	do_fpe				@ CP#2 (FPE)
-	movw_pc	lr				@ CP#3
+	ret.w	lr				@ CP#3
 #ifdef CONFIG_CRUNCH
 	b	crunch_task_enable		@ CP#4 (MaverickCrunch)
 	b	crunch_task_enable		@ CP#5 (MaverickCrunch)
 	b	crunch_task_enable		@ CP#6 (MaverickCrunch)
 #else
-	movw_pc	lr				@ CP#4
-	movw_pc	lr				@ CP#5
-	movw_pc	lr				@ CP#6
+	ret.w	lr				@ CP#4
+	ret.w	lr				@ CP#5
+	ret.w	lr				@ CP#6
 #endif
-	movw_pc	lr				@ CP#7
-	movw_pc	lr				@ CP#8
-	movw_pc	lr				@ CP#9
+	ret.w	lr				@ CP#7
+	ret.w	lr				@ CP#8
+	ret.w	lr				@ CP#9
 #ifdef CONFIG_VFP
 	W(b)	do_vfp				@ CP#10 (VFP)
 	W(b)	do_vfp				@ CP#11 (VFP)
 #else
-	movw_pc	lr				@ CP#10 (VFP)
-	movw_pc	lr				@ CP#11 (VFP)
+	ret.w	lr				@ CP#10 (VFP)
+	ret.w	lr				@ CP#11 (VFP)
 #endif
-	movw_pc	lr				@ CP#12
-	movw_pc	lr				@ CP#13
-	movw_pc	lr				@ CP#14 (Debug)
-	movw_pc	lr				@ CP#15 (Control)
+	ret.w	lr				@ CP#12
+	ret.w	lr				@ CP#13
+	ret.w	lr				@ CP#14 (Debug)
+	ret.w	lr				@ CP#15 (Control)

 #ifdef NEED_CPU_ARCHITECTURE
 	.align	2
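
(An illustrative note, not part of the patch: this jump table is entered
with "add pc, r8", so each slot must be exactly one 32-bit instruction.
On a Thumb-2 kernel a plain "ret lr" would assemble to a 16-bit "bx lr"
and misalign every following slot, which is why the 32-bit-wide "ret.w"
is used here:)

	ret	lr		@ Thumb-2: 16-bit bx lr - too narrow for a slot
	ret.w	lr		@ Thumb-2: bx lr + nop = 32 bits; in ARM mode
				@ no nop is emitted, bx lr is already 32-bit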
@@ -649,7 +649,7 @@ ENTRY(fp_enter)
 	.popsection

 ENTRY(no_fp)
-	mov	pc, lr
+	ret	lr
 ENDPROC(no_fp)

 __und_usr_fault_32:
@@ -745,7 +745,7 @@ ENDPROC(__switch_to)
 #ifdef CONFIG_ARM_THUMB
 	bx	\reg
 #else
-	mov	pc, \reg
+	ret	\reg
 #endif
 	.endm

@@ -837,7 +837,7 @@ kuser_cmpxchg64_fixup:
 #if __LINUX_ARM_ARCH__ < 6
 	bcc	kuser_cmpxchg32_fixup
 #endif
-	mov	pc, lr
+	ret	lr
 	.previous

 #else
@@ -905,7 +905,7 @@ kuser_cmpxchg32_fixup:
 	subs	r8, r4, r7
 	rsbcss	r8, r8, #(2b - 1b)
 	strcs	r7, [sp, #S_PC]
-	mov	pc, lr
+	ret	lr
 	.previous

 #else
@@ -8,6 +8,7 @@
 * published by the Free Software Foundation.
 */

+#include <asm/assembler.h>
 #include <asm/unistd.h>
 #include <asm/ftrace.h>
 #include <asm/unwind.h>
@@ -88,7 +89,7 @@ ENTRY(ret_from_fork)
 	cmp	r5, #0
 	movne	r0, r4
 	adrne	lr, BSYM(1f)
-	movne	pc, r5
+	retne	r5
 1:	get_thread_info tsk
 	b	ret_slow_syscall
 ENDPROC(ret_from_fork)
@@ -290,7 +291,7 @@ ENDPROC(ftrace_graph_caller_old)

 .macro mcount_exit
 	ldmia	sp!, {r0-r3, ip, lr}
-	mov	pc, ip
+	ret	ip
 .endm

 ENTRY(__gnu_mcount_nc)
@@ -298,7 +299,7 @@ UNWIND(.fnstart)
 #ifdef CONFIG_DYNAMIC_FTRACE
 	mov	ip, lr
 	ldmia	sp!, {lr}
-	mov	pc, ip
+	ret	ip
 #else
 	__mcount
 #endif
@@ -333,12 +334,12 @@ return_to_handler:
 	bl	ftrace_return_to_handler
 	mov	lr, r0			@ r0 has real ret addr
 	ldmia	sp!, {r0-r3}
-	mov	pc, lr
+	ret	lr
 #endif

 ENTRY(ftrace_stub)
 .Lftrace_stub:
-	mov	pc, lr
+	ret	lr
 ENDPROC(ftrace_stub)

 #endif /* CONFIG_FUNCTION_TRACER */
@@ -561,7 +562,7 @@ sys_mmap2:
 	streq	r5, [sp, #4]
 	beq	sys_mmap_pgoff
 	mov	r0, #-EINVAL
-	mov	pc, lr
+	ret	lr
 #else
 	str	r5, [sp, #4]
 	b	sys_mmap_pgoff

@@ -240,12 +240,6 @@
 	movs	pc, lr				@ return & move spsr_svc into cpsr
 	.endm

-	@
-	@ 32-bit wide "mov pc, reg"
-	@
-	.macro	movw_pc, reg
-	mov	pc, \reg
-	.endm
 #else	/* CONFIG_THUMB2_KERNEL */
 	.macro	svc_exit, rpsr, irq = 0
 	.if	\irq != 0
@@ -304,14 +298,6 @@
 	movs	pc, lr				@ return & move spsr_svc into cpsr
 	.endm
 #endif	/* ifdef CONFIG_CPU_V7M / else */
-
-	@
-	@ 32-bit wide "mov pc, reg"
-	@
-	.macro	movw_pc, reg
-	mov	pc, \reg
-	nop
-	.endm
 #endif	/* !CONFIG_THUMB2_KERNEL */

 /*
@@ -32,7 +32,7 @@ ENTRY(__set_fiq_regs)
 	ldr	lr, [r0]
 	msr	cpsr_c, r1	@ return to SVC mode
 	mov	r0, r0		@ avoid hazard prior to ARMv4
-	mov	pc, lr
+	ret	lr
 ENDPROC(__set_fiq_regs)

 ENTRY(__get_fiq_regs)
@@ -45,5 +45,5 @@ ENTRY(__get_fiq_regs)
 	str	lr, [r0]
 	msr	cpsr_c, r1	@ return to SVC mode
 	mov	r0, r0		@ avoid hazard prior to ARMv4
-	mov	pc, lr
+	ret	lr
 ENDPROC(__get_fiq_regs)
@@ -10,6 +10,7 @@
 * published by the Free Software Foundation.
 *
 */
+#include <asm/assembler.h>

 #define ATAG_CORE 0x54410001
 #define ATAG_CORE_SIZE ((2*4 + 3*4) >> 2)
@@ -61,10 +62,10 @@ __vet_atags:
 	cmp	r5, r6
 	bne	1f

-2:	mov	pc, lr				@ atag/dtb pointer is ok
+2:	ret	lr				@ atag/dtb pointer is ok

 1:	mov	r2, #0
-	mov	pc, lr
+	ret	lr
 ENDPROC(__vet_atags)

 /*
@@ -162,7 +163,7 @@ __lookup_processor_type:
 	cmp	r5, r6
 	blo	1b
 	mov	r5, #0				@ unknown processor
-2:	mov	pc, lr
+2:	ret	lr
 ENDPROC(__lookup_processor_type)

 /*
@@ -82,7 +82,7 @@ ENTRY(stext)
 	adr	lr, BSYM(1f)			@ return (PIC) address
 ARM(	add	pc, r10, #PROCINFO_INITFUNC	)
 THUMB(	add	r12, r10, #PROCINFO_INITFUNC	)
-THUMB(	mov	pc, r12				)
+THUMB(	ret	r12				)
 1:	b	__after_proc_init
 ENDPROC(stext)

@@ -119,7 +119,7 @@ ENTRY(secondary_startup)
 	mov	r13, r12			@ __secondary_switched address
 ARM(	add	pc, r10, #PROCINFO_INITFUNC	)
 THUMB(	add	r12, r10, #PROCINFO_INITFUNC	)
-THUMB(	mov	pc, r12				)
+THUMB(	ret	r12				)
 ENDPROC(secondary_startup)

 ENTRY(__secondary_switched)
@@ -164,7 +164,7 @@ __after_proc_init:
 #endif
 	mcr	p15, 0, r0, c1, c0, 0		@ write control reg
 #endif /* CONFIG_CPU_CP15 */
-	mov	pc, r13
+	ret	r13
 ENDPROC(__after_proc_init)
 	.ltorg

@@ -254,7 +254,7 @@ ENTRY(__setup_mpu)
 	orr	r0, r0, #CR_M			@ Set SCTRL.M (MPU on)
 	mcr	p15, 0, r0, c1, c0, 0		@ Enable MPU
 	isb
-	mov	pc,lr
+	ret	lr
 ENDPROC(__setup_mpu)
 #endif
 #include "head-common.S"
@@ -140,7 +140,7 @@ ENTRY(stext)
 	mov	r8, r4				@ set TTBR1 to swapper_pg_dir
 ARM(	add	pc, r10, #PROCINFO_INITFUNC	)
 THUMB(	add	r12, r10, #PROCINFO_INITFUNC	)
-THUMB(	mov	pc, r12				)
+THUMB(	ret	r12				)
 1:	b	__enable_mmu
 ENDPROC(stext)
 	.ltorg
@@ -335,7 +335,7 @@ __create_page_tables:
 	sub	r4, r4, #0x1000		@ point to the PGD table
 	mov	r4, r4, lsr #ARCH_PGD_SHIFT
 #endif
-	mov	pc, lr
+	ret	lr
 ENDPROC(__create_page_tables)
 	.ltorg
 	.align
@@ -383,7 +383,7 @@ ENTRY(secondary_startup)
 ARM(	add	pc, r10, #PROCINFO_INITFUNC	) @ initialise processor
 						  @ (return control reg)
 THUMB(	add	r12, r10, #PROCINFO_INITFUNC	)
-THUMB(	mov	pc, r12				)
+THUMB(	ret	r12				)
 ENDPROC(secondary_startup)

 /*
@@ -468,7 +468,7 @@ ENTRY(__turn_mmu_on)
 	instr_sync
 	mov	r3, r3
 	mov	r3, r13
-	mov	pc, r3
+	ret	r3
 __turn_mmu_on_end:
 ENDPROC(__turn_mmu_on)
 	.popsection
@@ -487,7 +487,7 @@ __fixup_smp:
 	orr	r4, r4, #0x0000b000
 	orr	r4, r4, #0x00000020	@ val 0x4100b020
 	teq	r3, r4			@ ARM 11MPCore?
-	moveq	pc, lr			@ yes, assume SMP
+	reteq	lr			@ yes, assume SMP

 	mrc	p15, 0, r0, c0, c0, 5	@ read MPIDR
 	and	r0, r0, #0xc0000000	@ multiprocessing extensions and
@@ -500,7 +500,7 @@ __fixup_smp:
 	orr	r4, r4, #0x0000c000
 	orr	r4, r4, #0x00000090
 	teq	r3, r4			@ Check for ARM Cortex-A9
-	movne	pc, lr			@ Not ARM Cortex-A9,
+	retne	lr			@ Not ARM Cortex-A9,

 	@ If a future SoC *does* use 0x0 as the PERIPH_BASE, then the
 	@ below address check will need to be #ifdef'd or equivalent
@@ -512,7 +512,7 @@ __fixup_smp:
 ARM_BE8(rev	r0, r0)			@ byteswap if big endian
 	and	r0, r0, #0x3		@ number of CPUs
 	teq	r0, #0x0		@ is 1?
-	movne	pc, lr
+	retne	lr

 __fixup_smp_on_up:
 	adr	r0, 1f
@@ -539,7 +539,7 @@ smp_on_up:
 	.text
 __do_fixup_smp_on_up:
 	cmp	r4, r5
-	movhs	pc, lr
+	reths	lr
 	ldmia	r4!, {r0, r6}
 ARM(	str	r6, [r0, r3]	)
 THUMB(	add	r0, r0, r3	)
@@ -672,7 +672,7 @@ ARM_BE8(rev16	ip, ip)
 2:	cmp	r4, r5
 	ldrcc	r7, [r4], #4	@ use branch for delay slot
 	bcc	1b
-	mov	pc, lr
+	ret	lr
 #endif
 ENDPROC(__fixup_a_pv_table)

@@ -99,7 +99,7 @@ ENTRY(__hyp_stub_install_secondary)
 	 * immediately.
 	 */
 	compare_cpu_mode_with_primary	r4, r5, r6, r7
-	movne	pc, lr
+	retne	lr

 	/*
 	 * Once we have given up on one CPU, we do not try to install the
@@ -111,7 +111,7 @@ ENTRY(__hyp_stub_install_secondary)
 	 */

 	cmp	r4, #HYP_MODE
-	movne	pc, lr			@ give up if the CPU is not in HYP mode
+	retne	lr			@ give up if the CPU is not in HYP mode

 	/*
 	 * Configure HSCTLR to set correct exception endianness/instruction set
@@ -201,7 +201,7 @@ ENDPROC(__hyp_get_vectors)
 	@ fall through
 ENTRY(__hyp_set_vectors)
 	__HVC(0)
-	mov	pc, lr
+	ret	lr
 ENDPROC(__hyp_set_vectors)

 #ifndef ZIMAGE
@@ -179,7 +179,7 @@ concan_load:
 	get_thread_info r10
 #endif
 4:	dec_preempt_count r10, r3
-	mov	pc, lr
+	ret	lr

 /*
  * Back up Concan regs to save area and disable access to them
@@ -265,7 +265,7 @@ ENTRY(iwmmxt_task_copy)
 	mov	r3, lr			@ preserve return address
 	bl	concan_dump
 	msr	cpsr_c, ip		@ restore interrupt mode
-	mov	pc, r3
+	ret	r3

 /*
  * Restore Concan state from given memory address
@@ -301,7 +301,7 @@ ENTRY(iwmmxt_task_restore)
 	mov	r3, lr			@ preserve return address
 	bl	concan_load
 	msr	cpsr_c, ip		@ restore interrupt mode
-	mov	pc, r3
+	ret	r3

 /*
  * Concan handling on task switch
@@ -323,7 +323,7 @@ ENTRY(iwmmxt_task_switch)
 	add	r3, r0, #TI_IWMMXT_STATE	@ get next task Concan save area
 	ldr	r2, [r2]			@ get current Concan owner
 	teq	r2, r3				@ next task owns it?
-	movne	pc, lr				@ no: leave Concan disabled
+	retne	lr				@ no: leave Concan disabled

 1:	@ flip Concan access
 XSC(	eor	r1, r1, #0x3	)
@@ -350,7 +350,7 @@ ENTRY(iwmmxt_task_release)
 	eors	r0, r0, r1		@ if equal...
 	streq	r0, [r3]		@ then clear ownership
 	msr	cpsr_c, r2		@ restore interrupts
-	mov	pc, lr
+	ret	lr

 	.data
 concan_owner:
@@ -3,6 +3,7 @@
 */

 #include <linux/linkage.h>
+#include <asm/assembler.h>
 #include <asm/kexec.h>

 	.align	3	/* not needed for this code, but keeps fncpy() happy */
@@ -59,7 +60,7 @@ ENTRY(relocate_new_kernel)
 	mov	r0,#0
 	ldr	r1,kexec_mach_type
 	ldr	r2,kexec_boot_atags
- ARM(	mov	pc, lr	)
+ ARM(	ret	lr	)
 THUMB(	bx	lr	)

 	.align
@@ -107,7 +107,7 @@ ENTRY(cpu_resume_mmu)
 	instr_sync
 	mov	r0, r0
 	mov	r0, r0
-	mov	pc, r3			@ jump to virtual address
+	ret	r3			@ jump to virtual address
 ENDPROC(cpu_resume_mmu)
 	.popsection
 cpu_resume_after_mmu:
@@ -17,6 +17,7 @@
 */

 #include <linux/linkage.h>
+#include <asm/assembler.h>
 #include <asm/unified.h>
 #include <asm/asm-offsets.h>
 #include <asm/kvm_asm.h>
@@ -134,7 +135,7 @@ phase2:
 	ldr	r0, =TRAMPOLINE_VA
 	adr	r1, target
 	bfi	r0, r1, #0, #PAGE_SHIFT
-	mov	pc, r0
+	ret	r0

 target:	@ We're now in the trampoline code, switch page tables
 	mcrr	p15, 4, r2, r3, c2
@@ -27,6 +27,7 @@ Boston, MA 02110-1301, USA. */


 #include <linux/linkage.h>
+#include <asm/assembler.h>

 #ifdef __ARMEB__
 #define al r1
@@ -47,7 +48,7 @@ ENTRY(__aeabi_llsl)
 THUMB(	lsrmi	r3, al, ip		)
 THUMB(	orrmi	ah, ah, r3		)
 	mov	al, al, lsl r2
-	mov	pc, lr
+	ret	lr

 ENDPROC(__ashldi3)
 ENDPROC(__aeabi_llsl)
@@ -27,6 +27,7 @@ Boston, MA 02110-1301, USA. */


 #include <linux/linkage.h>
+#include <asm/assembler.h>

 #ifdef __ARMEB__
 #define al r1
@@ -47,7 +48,7 @@ ENTRY(__aeabi_lasr)
 THUMB(	lslmi	r3, ah, ip		)
 THUMB(	orrmi	al, al, r3		)
 	mov	ah, ah, asr r2
-	mov	pc, lr
+	ret	lr

 ENDPROC(__ashrdi3)
 ENDPROC(__aeabi_lasr)
@@ -25,7 +25,7 @@
 ENTRY(c_backtrace)

 #if !defined(CONFIG_FRAME_POINTER) || !defined(CONFIG_PRINTK)
-	mov	pc, lr
+	ret	lr
 ENDPROC(c_backtrace)
 #else
 	stmfd	sp!, {r4 - r8, lr}	@ Save an extra register so we have a location...
@@ -1,3 +1,4 @@
+#include <asm/assembler.h>
 #include <asm/unwind.h>

 #if __LINUX_ARM_ARCH__ >= 6
@@ -70,7 +71,7 @@ UNWIND(	.fnstart	)
 	\instr	r2, r2, r3
 	str	r2, [r1, r0, lsl #2]
 	restore_irqs ip
-	mov	pc, lr
+	ret	lr
 UNWIND(	.fnend		)
 ENDPROC(\name		)
 	.endm
@@ -98,7 +99,7 @@ UNWIND(	.fnstart	)
 	\store	r2, [r1]
 	moveq	r0, #0
 	restore_irqs ip
-	mov	pc, lr
+	ret	lr
 UNWIND(	.fnend		)
 ENDPROC(\name		)
 	.endm
Some files were not shown because too many files have changed in this diff.