[ARM] 3115/1: small optimizations to exception vector entry code
Patch from Nicolas Pitre

Since we know the value of cpsr on entry, we can replace the bic+orr with a single eor. Also remove a possible result delay (at least on XScale).

Signed-off-by: Nicolas Pitre <nico@cam.org>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
commit b7ec479553
parent 7240f1f183
committed by Russell King
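Why a single eor is enough: each vector stub now tells the macro which mode the CPU is in on entry, so the constant (\mode ^ SVC_MODE) is known at assembly time, and xoring it into cpsr flips exactly the bits that differ between the entry mode and SVC mode while leaving every other flag untouched. A minimal sketch in C of that bit arithmetic (illustrative only, not part of the patch; it assumes the architectural ARM mode encodings IRQ_MODE=0x12, ABT_MODE=0x17, UND_MODE=0x1b, SVC_MODE=0x13 and a 0x1f mode mask):

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define SVC_MODE 0x13u
#define IRQ_MODE 0x12u
#define ABT_MODE 0x17u
#define UND_MODE 0x1bu

/* Old sequence: clear the mode field, then set SVC mode (bic + orr). */
static uint32_t bic_orr(uint32_t cpsr)
{
	return (cpsr & ~0x1fu) | SVC_MODE;
}

/* New sequence: a single eor with a constant computed at assembly time. */
static uint32_t eor_only(uint32_t cpsr, uint32_t entry_mode)
{
	return cpsr ^ (entry_mode ^ SVC_MODE);
}

int main(void)
{
	const uint32_t modes[] = { IRQ_MODE, ABT_MODE, UND_MODE };

	for (unsigned i = 0; i < 3; i++) {
		/* Flag bits outside the mode field survive both versions. */
		uint32_t cpsr = 0x80000000u | modes[i];

		assert(eor_only(cpsr, modes[i]) == bic_orr(cpsr));
		printf("mode %#x -> %#x\n", modes[i], eor_only(cpsr, modes[i]));
	}
	return 0;
}

The trick only works because the entry mode is fixed for each vector; if cpsr could arrive in an unknown mode, the bic+orr pair would still be needed.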
@@ -785,7 +785,7 @@ __kuser_helper_end:
  * SP points to a minimal amount of processor-private memory, the address
  * of which is copied into r0 for the mode specific abort handler.
  */
-	.macro	vector_stub, name, correction=0
+	.macro	vector_stub, name, mode, correction=0
 	.align	5
 
 vector_\name:
@@ -805,15 +805,14 @@ vector_\name:
 	@ Prepare for SVC32 mode.  IRQs remain disabled.
 	@
 	mrs	r0, cpsr
-	bic	r0, r0, #MODE_MASK
-	orr	r0, r0, #SVC_MODE
+	eor	r0, r0, #(\mode ^ SVC_MODE)
 	msr	spsr_cxsf, r0
 
 	@
 	@ the branch table must immediately follow this code
 	@
-	mov	r0, sp
 	and	lr, lr, #0x0f
+	mov	r0, sp
 	ldr	lr, [pc, lr, lsl #2]
 	movs	pc, lr			@ branch to handler in SVC mode
 	.endm
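The reordering at the end of this hunk is the "possible result delay" fix from the commit message: mov r0, sp does not depend on lr, so placing it between the and that computes the branch-table index and the ldr that uses lr as a shifted index gives the and result time to become available, avoiding a stall on cores such as XScale where a result used for addressing incurs extra latency. An annotated copy of the new ordering (the comments are added here for illustration and are not part of the patch):

	and	lr, lr, #0x0f		@ table index = low 4 bits of the saved spsr
	mov	r0, sp			@ independent of lr: hides the and->ldr result delay
	ldr	lr, [pc, lr, lsl #2]	@ fetch handler address from the table that follows
	movs	pc, lr			@ branch to handler in SVC mode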
@@ -823,7 +822,7 @@ __stubs_start:
 /*
  * Interrupt dispatcher
  */
-	vector_stub	irq, 4
+	vector_stub	irq, IRQ_MODE, 4
 
 	.long	__irq_usr			@  0  (USR_26 / USR_32)
 	.long	__irq_invalid			@  1  (FIQ_26 / FIQ_32)
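For readers unfamiliar with the stubs: each vector_stub invocation is immediately followed by a 16-entry branch table indexed by the low four bits of the interrupted context's cpsr, which is what the and/ldr pair in the previous hunk implements. A rough C model of that dispatch (hypothetical names, illustrative only; entries not shown in the hunk are left empty):

#include <stdint.h>
#include <stdio.h>

typedef void (*handler_t)(void);

static void irq_usr(void)     { puts("__irq_usr"); }
static void irq_invalid(void) { puts("__irq_invalid"); }

int main(void)
{
	/* Index 0 is USR_26/USR_32, index 1 is FIQ_26/FIQ_32, and so on. */
	handler_t table[16] = { irq_usr, irq_invalid };
	uint32_t parent_cpsr = 0x10;	/* interrupted code was running in user mode */
	handler_t handler = table[parent_cpsr & 0x0f];

	if (handler)
		handler();
	return 0;
}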
@@ -846,7 +845,7 @@ __stubs_start:
  * Data abort dispatcher
  * Enter in ABT mode, spsr = USR CPSR, lr = USR PC
  */
-	vector_stub	dabt, 8
+	vector_stub	dabt, ABT_MODE, 8
 
 	.long	__dabt_usr			@  0  (USR_26 / USR_32)
 	.long	__dabt_invalid			@  1  (FIQ_26 / FIQ_32)
@@ -869,7 +868,7 @@ __stubs_start:
  * Prefetch abort dispatcher
  * Enter in ABT mode, spsr = USR CPSR, lr = USR PC
  */
-	vector_stub	pabt, 4
+	vector_stub	pabt, ABT_MODE, 4
 
 	.long	__pabt_usr			@  0  (USR_26 / USR_32)
 	.long	__pabt_invalid			@  1  (FIQ_26 / FIQ_32)
@@ -892,7 +891,7 @@ __stubs_start:
  * Undef instr entry dispatcher
  * Enter in UND mode, spsr = SVC/USR CPSR, lr = SVC/USR PC
  */
-	vector_stub	und
+	vector_stub	und, UND_MODE
 
 	.long	__und_usr			@  0  (USR_26 / USR_32)
 	.long	__und_invalid			@  1  (FIQ_26 / FIQ_32)