author		Ard Biesheuvel <ard.biesheuvel@linaro.org>	2015-10-08 15:02:03 -0400
committer	Catalin Marinas <catalin.marinas@arm.com>	2015-10-12 11:19:45 -0400
commit		207918461eb0aca720fddec5da79bc71c133b9f1
tree		3e6133bef93e5ed5e0610ca6f536b23e97d0b736
parent		d4dddfdbbc75f46d2cbab4e9f421999452617d64
arm64: use ENDPIPROC() to annotate position independent assembler routines
For more control over which functions are called with the MMU off or
with the UEFI 1:1 mapping active, annotate some assembler routines as
position independent. This is done by introducing ENDPIPROC(), which
replaces the ENDPROC() declaration of those routines.
Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
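As a minimal sketch (not part of the patch itself), this is roughly what the assembler ends up seeing when a routine such as memcpy is closed with the new annotation, assuming the ENDPIPROC() definition added to assembler.h below:

	// illustrative expansion of ENDPIPROC(memcpy)
	.globl	__pi_memcpy
	.type	__pi_memcpy, %function
	.set	__pi_memcpy, memcpy		// __pi_memcpy becomes an alias of memcpy
	.size	__pi_memcpy, . - memcpy		// covers the same extent as memcpy
	ENDPROC(memcpy)				// the usual end-of-function marker still applies

The alias emits no code of its own; it merely gives position-independent callers a distinct, explicitly exported symbol to bind to.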
-rw-r--r--	arch/arm64/include/asm/assembler.h	11
-rw-r--r--	arch/arm64/lib/memchr.S	2
-rw-r--r--	arch/arm64/lib/memcmp.S	2
-rw-r--r--	arch/arm64/lib/memcpy.S	2
-rw-r--r--	arch/arm64/lib/memmove.S	2
-rw-r--r--	arch/arm64/lib/memset.S	2
-rw-r--r--	arch/arm64/lib/strcmp.S	2
-rw-r--r--	arch/arm64/lib/strlen.S	2
-rw-r--r--	arch/arm64/lib/strncmp.S	2
-rw-r--r--	arch/arm64/mm/cache.S	10
10 files changed, 24 insertions, 13 deletions
diff --git a/arch/arm64/include/asm/assembler.h b/arch/arm64/include/asm/assembler.h
index b51f2cc22ca9..12eff928ef8b 100644
--- a/arch/arm64/include/asm/assembler.h
+++ b/arch/arm64/include/asm/assembler.h
@@ -193,4 +193,15 @@ lr	.req	x30		// link register
 	str	\src, [\tmp, :lo12:\sym]
 	.endm
 
+/*
+ * Annotate a function as position independent, i.e., safe to be called before
+ * the kernel virtual mapping is activated.
+ */
+#define ENDPIPROC(x)			\
+	.globl	__pi_##x;		\
+	.type	__pi_##x, %function;	\
+	.set	__pi_##x, x;		\
+	.size	__pi_##x, . - x;	\
+	ENDPROC(x)
+
 #endif	/* __ASM_ASSEMBLER_H */
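A hypothetical caller-side view (not part of this change, with purely illustrative register choices): code that has to run with the MMU off or under the UEFI 1:1 mapping could reference the alias rather than the ordinary symbol, e.g.

	mov	x0, x20			// dst
	mov	x1, x21			// src
	mov	x2, x22			// len
	bl	__pi_memcpy		// PC-relative call to the position-independent alias

How such callers are actually wired up (for instance in the EFI stub) is left to follow-up patches and is not shown here.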
diff --git a/arch/arm64/lib/memchr.S b/arch/arm64/lib/memchr.S
index 8636b7549163..4444c1d25f4b 100644
--- a/arch/arm64/lib/memchr.S
+++ b/arch/arm64/lib/memchr.S
@@ -41,4 +41,4 @@ ENTRY(memchr)
 	ret
 2:	mov	x0, #0
 	ret
-ENDPROC(memchr)
+ENDPIPROC(memchr)
diff --git a/arch/arm64/lib/memcmp.S b/arch/arm64/lib/memcmp.S
index 6ea0776ba6de..ffbdec00327d 100644
--- a/arch/arm64/lib/memcmp.S
+++ b/arch/arm64/lib/memcmp.S
@@ -255,4 +255,4 @@ CPU_LE( rev	data2, data2 )
 .Lret0:
 	mov	result, #0
 	ret
-ENDPROC(memcmp)
+ENDPIPROC(memcmp)
diff --git a/arch/arm64/lib/memcpy.S b/arch/arm64/lib/memcpy.S
index 173a1aace9bb..36a6a62cf263 100644
--- a/arch/arm64/lib/memcpy.S
+++ b/arch/arm64/lib/memcpy.S
@@ -71,4 +71,4 @@
 ENTRY(memcpy)
 #include "copy_template.S"
 	ret
-ENDPROC(memcpy)
+ENDPIPROC(memcpy)
diff --git a/arch/arm64/lib/memmove.S b/arch/arm64/lib/memmove.S
index 57b19ea2dad4..68e2f2035e23 100644
--- a/arch/arm64/lib/memmove.S
+++ b/arch/arm64/lib/memmove.S
@@ -194,4 +194,4 @@ ENTRY(memmove)
 	tst	count, #0x3f
 	b.ne	.Ltail63
 	ret
-ENDPROC(memmove)
+ENDPIPROC(memmove)
diff --git a/arch/arm64/lib/memset.S b/arch/arm64/lib/memset.S
index 7c72dfd36b63..29f405f08792 100644
--- a/arch/arm64/lib/memset.S
+++ b/arch/arm64/lib/memset.S
@@ -213,4 +213,4 @@ ENTRY(memset)
 	ands	count, count, zva_bits_x
 	b.ne	.Ltail_maybe_long
 	ret
-ENDPROC(memset)
+ENDPIPROC(memset)
diff --git a/arch/arm64/lib/strcmp.S b/arch/arm64/lib/strcmp.S
index 42f828b06c59..471fe61760ef 100644
--- a/arch/arm64/lib/strcmp.S
+++ b/arch/arm64/lib/strcmp.S
@@ -231,4 +231,4 @@ CPU_BE(	orr	syndrome, diff, has_nul )
 	lsr	data1, data1, #56
 	sub	result, data1, data2, lsr #56
 	ret
-ENDPROC(strcmp)
+ENDPIPROC(strcmp)
diff --git a/arch/arm64/lib/strlen.S b/arch/arm64/lib/strlen.S
index 987b68b9ce44..55ccc8e24c08 100644
--- a/arch/arm64/lib/strlen.S
+++ b/arch/arm64/lib/strlen.S
@@ -123,4 +123,4 @@ CPU_LE( lsr	tmp2, tmp2, tmp1 )	/* Shift (tmp1 & 63).  */
 	csinv	data1, data1, xzr, le
 	csel	data2, data2, data2a, le
 	b	.Lrealigned
-ENDPROC(strlen)
+ENDPIPROC(strlen)
diff --git a/arch/arm64/lib/strncmp.S b/arch/arm64/lib/strncmp.S
index 0224cf5a5533..e267044761c6 100644
--- a/arch/arm64/lib/strncmp.S
+++ b/arch/arm64/lib/strncmp.S
@@ -307,4 +307,4 @@ CPU_BE( orr	syndrome, diff, has_nul )
 .Lret0:
 	mov	result, #0
 	ret
-ENDPROC(strncmp)
+ENDPIPROC(strncmp)
diff --git a/arch/arm64/mm/cache.S b/arch/arm64/mm/cache.S
index eb48d5df4a0f..cfa44a6adc0a 100644
--- a/arch/arm64/mm/cache.S
+++ b/arch/arm64/mm/cache.S
@@ -98,7 +98,7 @@ ENTRY(__flush_dcache_area)
 	b.lo	1b
 	dsb	sy
 	ret
-ENDPROC(__flush_dcache_area)
+ENDPIPROC(__flush_dcache_area)
 
 /*
  * __inval_cache_range(start, end)
@@ -131,7 +131,7 @@ __dma_inv_range:
 	b.lo	2b
 	dsb	sy
 	ret
-ENDPROC(__inval_cache_range)
+ENDPIPROC(__inval_cache_range)
 ENDPROC(__dma_inv_range)
 
 /*
@@ -171,7 +171,7 @@ ENTRY(__dma_flush_range)
 	b.lo	1b
 	dsb	sy
 	ret
-ENDPROC(__dma_flush_range)
+ENDPIPROC(__dma_flush_range)
 
 /*
  * __dma_map_area(start, size, dir)
@@ -184,7 +184,7 @@ ENTRY(__dma_map_area)
 	cmp	w2, #DMA_FROM_DEVICE
 	b.eq	__dma_inv_range
 	b	__dma_clean_range
-ENDPROC(__dma_map_area)
+ENDPIPROC(__dma_map_area)
 
 /*
  * __dma_unmap_area(start, size, dir)
@@ -197,4 +197,4 @@ ENTRY(__dma_unmap_area)
 	cmp	w2, #DMA_TO_DEVICE
 	b.ne	__dma_inv_range
 	ret
-ENDPROC(__dma_unmap_area)
+ENDPIPROC(__dma_unmap_area)