author | Ard Biesheuvel <ard.biesheuvel@linaro.org> | 2015-10-08 20:02:03 +0100
---|---|---
committer | Catalin Marinas <catalin.marinas@arm.com> | 2015-10-12 16:19:45 +0100
commit | 207918461eb0aca720fddec5da79bc71c133b9f1 (patch) |
tree | 3e6133bef93e5ed5e0610ca6f536b23e97d0b736 /arch/arm64 |
parent | d4dddfdbbc75f46d2cbab4e9f421999452617d64 (diff) |
arm64: use ENDPIPROC() to annotate position independent assembler routines
For more control over which functions are called with the MMU off or
with the UEFI 1:1 mapping active, annotate some assembler routines as
position independent. This is done by introducing ENDPIPROC(), which
replaces the ENDPROC() declaration of those routines.
Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
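
To illustrate the pattern (a hedged sketch; my_func is a hypothetical routine, not one touched by this patch): ENDPIPROC(x) keeps the usual ENDPROC() annotation and additionally emits a global alias __pi_x, typed as a function and given the same size, so callers that must run without the kernel virtual mapping can reference the routine through the __pi_ prefix.

	/* Hypothetical routine, for illustration only -- not part of this patch */
	ENTRY(my_func)			// normal entry point: my_func
		mov	x0, #0
		ret
	ENDPIPROC(my_func)		// ends my_func and also emits the alias __pi_my_func

After assembly, my_func and __pi_my_func resolve to the same address; the __pi_ alias simply marks the entry point as safe to call before the kernel virtual mapping is active.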
Diffstat (limited to 'arch/arm64')
-rw-r--r-- | arch/arm64/include/asm/assembler.h | 11
-rw-r--r-- | arch/arm64/lib/memchr.S | 2
-rw-r--r-- | arch/arm64/lib/memcmp.S | 2
-rw-r--r-- | arch/arm64/lib/memcpy.S | 2
-rw-r--r-- | arch/arm64/lib/memmove.S | 2
-rw-r--r-- | arch/arm64/lib/memset.S | 2
-rw-r--r-- | arch/arm64/lib/strcmp.S | 2
-rw-r--r-- | arch/arm64/lib/strlen.S | 2
-rw-r--r-- | arch/arm64/lib/strncmp.S | 2
-rw-r--r-- | arch/arm64/mm/cache.S | 10
10 files changed, 24 insertions, 13 deletions
diff --git a/arch/arm64/include/asm/assembler.h b/arch/arm64/include/asm/assembler.h
index b51f2cc22ca9..12eff928ef8b 100644
--- a/arch/arm64/include/asm/assembler.h
+++ b/arch/arm64/include/asm/assembler.h
@@ -193,4 +193,15 @@ lr	.req	x30		// link register
 	str	\src, [\tmp, :lo12:\sym]
 	.endm
 
+/*
+ * Annotate a function as position independent, i.e., safe to be called before
+ * the kernel virtual mapping is activated.
+ */
+#define ENDPIPROC(x)			\
+	.globl	__pi_##x;		\
+	.type	__pi_##x, %function;	\
+	.set	__pi_##x, x;		\
+	.size	__pi_##x, . - x;	\
+	ENDPROC(x)
+
 #endif	/* __ASM_ASSEMBLER_H */
diff --git a/arch/arm64/lib/memchr.S b/arch/arm64/lib/memchr.S
index 8636b7549163..4444c1d25f4b 100644
--- a/arch/arm64/lib/memchr.S
+++ b/arch/arm64/lib/memchr.S
@@ -41,4 +41,4 @@ ENTRY(memchr)
 	ret
 2:	mov	x0, #0
 	ret
-ENDPROC(memchr)
+ENDPIPROC(memchr)
diff --git a/arch/arm64/lib/memcmp.S b/arch/arm64/lib/memcmp.S
index 6ea0776ba6de..ffbdec00327d 100644
--- a/arch/arm64/lib/memcmp.S
+++ b/arch/arm64/lib/memcmp.S
@@ -255,4 +255,4 @@ CPU_LE( rev	data2, data2 )
 .Lret0:
 	mov	result, #0
 	ret
-ENDPROC(memcmp)
+ENDPIPROC(memcmp)
diff --git a/arch/arm64/lib/memcpy.S b/arch/arm64/lib/memcpy.S
index 173a1aace9bb..36a6a62cf263 100644
--- a/arch/arm64/lib/memcpy.S
+++ b/arch/arm64/lib/memcpy.S
@@ -71,4 +71,4 @@
 ENTRY(memcpy)
 #include "copy_template.S"
 	ret
-ENDPROC(memcpy)
+ENDPIPROC(memcpy)
diff --git a/arch/arm64/lib/memmove.S b/arch/arm64/lib/memmove.S
index 57b19ea2dad4..68e2f2035e23 100644
--- a/arch/arm64/lib/memmove.S
+++ b/arch/arm64/lib/memmove.S
@@ -194,4 +194,4 @@ ENTRY(memmove)
 	tst	count, #0x3f
 	b.ne	.Ltail63
 	ret
-ENDPROC(memmove)
+ENDPIPROC(memmove)
diff --git a/arch/arm64/lib/memset.S b/arch/arm64/lib/memset.S
index 7c72dfd36b63..29f405f08792 100644
--- a/arch/arm64/lib/memset.S
+++ b/arch/arm64/lib/memset.S
@@ -213,4 +213,4 @@ ENTRY(memset)
 	ands	count, count, zva_bits_x
 	b.ne	.Ltail_maybe_long
 	ret
-ENDPROC(memset)
+ENDPIPROC(memset)
diff --git a/arch/arm64/lib/strcmp.S b/arch/arm64/lib/strcmp.S
index 42f828b06c59..471fe61760ef 100644
--- a/arch/arm64/lib/strcmp.S
+++ b/arch/arm64/lib/strcmp.S
@@ -231,4 +231,4 @@ CPU_BE(	orr	syndrome, diff, has_nul )
 	lsr	data1, data1, #56
 	sub	result, data1, data2, lsr #56
 	ret
-ENDPROC(strcmp)
+ENDPIPROC(strcmp)
diff --git a/arch/arm64/lib/strlen.S b/arch/arm64/lib/strlen.S
index 987b68b9ce44..55ccc8e24c08 100644
--- a/arch/arm64/lib/strlen.S
+++ b/arch/arm64/lib/strlen.S
@@ -123,4 +123,4 @@ CPU_LE( lsr	tmp2, tmp2, tmp1 )	/* Shift (tmp1 & 63). */
 	csinv	data1, data1, xzr, le
 	csel	data2, data2, data2a, le
 	b	.Lrealigned
-ENDPROC(strlen)
+ENDPIPROC(strlen)
diff --git a/arch/arm64/lib/strncmp.S b/arch/arm64/lib/strncmp.S
index 0224cf5a5533..e267044761c6 100644
--- a/arch/arm64/lib/strncmp.S
+++ b/arch/arm64/lib/strncmp.S
@@ -307,4 +307,4 @@ CPU_BE(	orr	syndrome, diff, has_nul )
 .Lret0:
 	mov	result, #0
 	ret
-ENDPROC(strncmp)
+ENDPIPROC(strncmp)
diff --git a/arch/arm64/mm/cache.S b/arch/arm64/mm/cache.S
index eb48d5df4a0f..cfa44a6adc0a 100644
--- a/arch/arm64/mm/cache.S
+++ b/arch/arm64/mm/cache.S
@@ -98,7 +98,7 @@ ENTRY(__flush_dcache_area)
 	b.lo	1b
 	dsb	sy
 	ret
-ENDPROC(__flush_dcache_area)
+ENDPIPROC(__flush_dcache_area)
 
 /*
  *	__inval_cache_range(start, end)
@@ -131,7 +131,7 @@ __dma_inv_range:
 	b.lo	2b
 	dsb	sy
 	ret
-ENDPROC(__inval_cache_range)
+ENDPIPROC(__inval_cache_range)
 ENDPROC(__dma_inv_range)
 
 /*
@@ -171,7 +171,7 @@ ENTRY(__dma_flush_range)
 	b.lo	1b
 	dsb	sy
 	ret
-ENDPROC(__dma_flush_range)
+ENDPIPROC(__dma_flush_range)
 
 /*
  *	__dma_map_area(start, size, dir)
@@ -184,7 +184,7 @@ ENTRY(__dma_map_area)
 	cmp	w2, #DMA_FROM_DEVICE
 	b.eq	__dma_inv_range
 	b	__dma_clean_range
-ENDPROC(__dma_map_area)
+ENDPIPROC(__dma_map_area)
 
 /*
  *	__dma_unmap_area(start, size, dir)
@@ -197,4 +197,4 @@ ENTRY(__dma_unmap_area)
 	cmp	w2, #DMA_TO_DEVICE
 	b.ne	__dma_inv_range
 	ret
-ENDPROC(__dma_unmap_area)
+ENDPIPROC(__dma_unmap_area)
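
For context on how these aliases are consumed (a sketch, not part of this patch; the register assignments are illustrative assumptions): a caller running with the MMU off or under the UEFI 1:1 mapping branches to the __pi_ alias rather than the plain symbol.

	/*
	 * Hypothetical call site in early assembly code: branch to the
	 * __pi_ alias emitted by ENDPIPROC() to make explicit that the
	 * callee must be position independent.
	 */
	mov	x0, x20			// dest
	mov	x1, x21			// src
	mov	x2, x22			// size
	bl	__pi_memcpy

Because the alias and the plain symbol share an address, existing callers of memcpy and friends are unaffected; only code that needs the position-independent guarantee opts in via the prefix.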