author		Dave Martin <dave.martin@linaro.org>	2012-02-10 18:07:07 -0800
committer	Marc Zyngier <marc.zyngier@arm.com>	2012-09-19 08:32:50 +0100
commit		424e5994e63326a42012f003f1174f3c363c7b62 (patch)
tree		d8ff950676dac0fc3b38d6801423f996f8b20214 /arch/arm/boot/compressed/head.S
parent		80c59dafb1a9a86fa996e6e34d06b60567c925ca (diff)
ARM: zImage/virt: hyp mode entry support for the zImage loader
The zImage loader needs to turn on the MMU in order to take
advantage of caching while decompressing the zImage. Doing so
in hyp mode would require support for the LPAE page table
format; to avoid this complexity, this patch switches out of hyp
mode during decompression and returns to hyp mode just before
booting the kernel.
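In outline, the mode dance condenses to the following sketch
(assembled from the hunks below for readability; it is not
additional code in the patch, and register usage follows the diff):

		mrs	r9, cpsr		@ record the mode we were entered in
		bl	__hyp_stub_install	@ drop from Hyp to SVC, reversibly
		safe_svcmode_maskall r0		@ force SVC mode, IRQs/FIQs masked
		msr	spsr_cxsf, r9		@ park the boot mode in the SPSR

		@ ... enable the MMU, decompress, turn MMU and caches back off ...

		mrs	r0, spsr		@ recover the recorded boot mode
		and	r0, r0, #MODE_MASK
		cmp	r0, #HYP_MODE		@ entered in Hyp?
		bne	__enter_kernel		@ no: call the kernel from SVC
		adr	r0, __hyp_reentry_vectors @ (the patch computes this PC-relatively)
		bl	__hyp_set_vectors	@ aim the Hyp vectors at the trampoline
		__HVC(0)			@ yes: bounce back up to Hyp first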
This implementation assumes that the Hyp mode view of memory and the
PL1 view of memory are coherent, provided that the MMU and caches
are off in both, as required by the boot protocol. The zImage
decompression code must drain the write buffer on completion anyway, and
entry into Hyp mode should flush any prefetch buffer, avoiding hazards
associated with local write buffers and the pipeline.
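For readers verifying that reasoning: on ARMv7 the two hazards map onto
the dsb and isb barriers (a sketch of the required ordering only; the
patch itself relies on the existing cache_clean_flush and cache_off
routines in head.S rather than these bare instructions):

		dsb				@ drain the write buffer: all prior writes complete
		isb				@ flush the pipeline, discarding stale prefetches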
Signed-off-by: Dave Martin <dave.martin@linaro.org>
Signed-off-by: Marc Zyngier <marc.zyngier@arm.com>
Diffstat (limited to 'arch/arm/boot/compressed/head.S')
-rw-r--r--	arch/arm/boot/compressed/head.S	71
1 file changed, 64 insertions(+), 7 deletions(-)
diff --git a/arch/arm/boot/compressed/head.S b/arch/arm/boot/compressed/head.S
index b8c64b80bafc..0749a6184abf 100644
--- a/arch/arm/boot/compressed/head.S
+++ b/arch/arm/boot/compressed/head.S
@@ -9,6 +9,7 @@
  * published by the Free Software Foundation.
  */
 #include <linux/linkage.h>
+#include <asm/assembler.h>
 
 /*
  * Debugging stuff
@@ -132,7 +133,12 @@ start:
 		.word	start			@ absolute load/run zImage address
 		.word	_edata			@ zImage end address
  THUMB(		.thumb			)
-1:		mov	r7, r1			@ save architecture ID
+1:
+		mrs	r9, cpsr
+#ifdef CONFIG_ARM_VIRT_EXT
+		bl	__hyp_stub_install	@ get into SVC mode, reversibly
+#endif
+		mov	r7, r1			@ save architecture ID
 		mov	r8, r2			@ save atags pointer
 
 #ifndef __ARM_ARCH_2__
@@ -148,9 +154,9 @@ start:
  ARM(		swi	0x123456	)	@ angel_SWI_ARM
  THUMB(		svc	0xab		)	@ angel_SWI_THUMB
 not_angel:
-		mrs	r2, cpsr		@ turn off interrupts to
-		orr	r2, r2, #0xc0		@ prevent angel from running
-		msr	cpsr_c, r2
+		safe_svcmode_maskall r0
+		msr	spsr_cxsf, r9		@ Save the CPU boot mode in
+						@ SPSR
 #else
 		teqp	pc, #0x0c000003		@ turn off interrupts
 #endif
@@ -350,6 +356,20 @@ dtb_check_done:
 		adr	r5, restart
 		bic	r5, r5, #31
 
+/* Relocate the hyp vector base if necessary */
+#ifdef CONFIG_ARM_VIRT_EXT
+		mrs	r0, spsr
+		and	r0, r0, #MODE_MASK
+		cmp	r0, #HYP_MODE
+		bne	1f
+
+		bl	__hyp_get_vectors
+		sub	r0, r0, r5
+		add	r0, r0, r10
+		bl	__hyp_set_vectors
+1:
+#endif
+
 		sub	r9, r6, r5		@ size to copy
 		add	r9, r9, #31		@ rounded up to a multiple
 		bic	r9, r9, #31		@ ... of 32 bytes
@@ -458,11 +478,29 @@ not_relocated:	mov	r0, #0
 		bl	decompress_kernel
 		bl	cache_clean_flush
 		bl	cache_off
-		mov	r0, #0			@ must be zero
 		mov	r1, r7			@ restore architecture number
 		mov	r2, r8			@ restore atags pointer
- ARM(		mov	pc, r4	)		@ call kernel
- THUMB(		bx	r4	)		@ entry point is always ARM
+
+#ifdef CONFIG_ARM_VIRT_EXT
+		mrs	r0, spsr		@ Get saved CPU boot mode
+		and	r0, r0, #MODE_MASK
+		cmp	r0, #HYP_MODE		@ if not booted in HYP mode...
+		bne	__enter_kernel		@ boot kernel directly
+
+		adr	r12, .L__hyp_reentry_vectors_offset
+		ldr	r0, [r12]
+		add	r0, r0, r12
+
+		bl	__hyp_set_vectors
+		__HVC(0)			@ otherwise bounce to hyp mode
+
+		b	.			@ should never be reached
+
+		.align	2
+.L__hyp_reentry_vectors_offset:	.long __hyp_reentry_vectors - .
+#else
+		b	__enter_kernel
+#endif
 
 		.align	2
 		.type	LC0, #object
@@ -1191,6 +1229,25 @@ memdump:	mov	r12, r0
 #endif
 
 		.ltorg
+
+#ifdef CONFIG_ARM_VIRT_EXT
+.align 5
+__hyp_reentry_vectors:
+W(b)	.			@ reset
+W(b)	.			@ undef
+W(b)	.			@ svc
+W(b)	.			@ pabort
+W(b)	.			@ dabort
+W(b)	__enter_kernel		@ hyp
+W(b)	.			@ irq
+W(b)	.			@ fiq
+#endif /* CONFIG_ARM_VIRT_EXT */
+
+__enter_kernel:
+		mov	r0, #0			@ must be 0
+ ARM(		mov	pc, r4	)		@ call kernel
+ THUMB(		bx	r4	)		@ entry point is always ARM
+
 reloc_code_end:
 
 		.align