author     Ard Biesheuvel <ardb@kernel.org>                    2021-12-15 09:31:36 +0100
committer  Greg Kroah-Hartman <gregkh@linuxfoundation.org>     2021-12-29 12:23:37 +0100
commit     5478b90270a3569cf6f596959b0900c9f8ef8fe4 (patch)
tree       c6b396d267626cadc046eee92f7ab9711656bc41 /arch/arm/kernel
parent     1c3d4122bec632ffbd6c40ef9321b17a55ff810d (diff)
ARM: 9169/1: entry: fix Thumb2 bug in iWMMXt exception handling
commit 8536a5ef886005bc443c2da9b842d69fd3d7647f upstream.

The Thumb2 version of the FP exception handling entry code treats the register holding the CP number (R8) differently, resulting in the iWMMXt CP number check being incorrect.

Fix this by unifying the ARM and Thumb2 code paths, and switch the order of the additions of the TI_USED_CP offset and the shifted CP index.

Cc: <stable@vger.kernel.org>
Fixes: b86040a59feb ("Thumb-2: Implementation of the unified start-up and exceptions code")
Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Russell King (Oracle) <rmk+kernel@armlinux.org.uk>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
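For reference, the corrected sequence extracts the coprocessor number from bits 8-11 of the faulting instruction and records it in the per-thread used_cp[] array. Below is a minimal C sketch of that logic; the struct layout, field names and function name are illustrative assumptions, not the kernel's actual definitions:

    #include <stdint.h>

    /* Illustrative stand-in for the used_cp[] bookkeeping in thread_info. */
    struct thread_info_sketch {
            uint8_t used_cp[16];    /* one flag per coprocessor number 0..15 */
    };

    /*
     * Mirror of the fixed assembly: 'and r8, r0, #0x00000f00' masks out the
     * CP number, and the byte store at [r6, #TI_USED_CP] sets used_cp[cp].
     */
    static void mark_used_cp(struct thread_info_sketch *ti, uint32_t insn)
    {
            uint32_t cp = (insn & 0x00000f00) >> 8;  /* coprocessor number */

            ti->used_cp[cp] = 1;
    }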
Diffstat (limited to 'arch/arm/kernel')
-rw-r--r--  arch/arm/kernel/entry-armv.S | 8 +++-----
1 file changed, 3 insertions(+), 5 deletions(-)
diff --git a/arch/arm/kernel/entry-armv.S b/arch/arm/kernel/entry-armv.S
index b62d74a2c73a..4937d514318e 100644
--- a/arch/arm/kernel/entry-armv.S
+++ b/arch/arm/kernel/entry-armv.S
@@ -596,11 +596,9 @@ call_fpe:
tstne r0, #0x04000000 @ bit 26 set on both ARM and Thumb-2
reteq lr
and r8, r0, #0x00000f00 @ mask out CP number
- THUMB( lsr r8, r8, #8 )
mov r7, #1
- add r6, r10, #TI_USED_CP
- ARM( strb r7, [r6, r8, lsr #8] ) @ set appropriate used_cp[]
- THUMB( strb r7, [r6, r8] ) @ set appropriate used_cp[]
+ add r6, r10, r8, lsr #8 @ add used_cp[] array offset first
+ strb r7, [r6, #TI_USED_CP] @ set appropriate used_cp[]
#ifdef CONFIG_IWMMXT
@ Test if we need to give access to iWMMXt coprocessors
ldr r5, [r10, #TI_FLAGS]
@@ -609,7 +607,7 @@ call_fpe:
bcs iwmmxt_task_enable
#endif
ARM( add pc, pc, r8, lsr #6 )
- THUMB( lsl r8, r8, #2 )
+ THUMB( lsr r8, r8, #6 )
THUMB( add pc, r8 )
nop
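In the second hunk, r8 still holds the CP number shifted left by 8 bits (the Thumb-only 'lsr r8, r8, #8' was removed in the first hunk), so the Thumb-2 dispatch now shifts right by 6 instead of left by 2 to obtain cp * 4, the byte offset into the table of 4-byte branch instructions that follows. A sketch of that arithmetic, using a hypothetical helper name:

    /* (cp << 8) >> 6 == cp * 4: byte offset of the per-CP branch slot. */
    static inline uint32_t cp_dispatch_offset(uint32_t r8_masked)
    {
            return r8_masked >> 6;
    }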