path: root/arch/arm/lib/memzero.S
author     Lin Yongting <linyongting@gmail.com>          2014-11-26 08:33:49 -0500
committer  Russell King <rmk+kernel@arm.linux.org.uk>    2014-11-27 11:00:23 -0500
commit     20cb6abfe0062693183eb2a85f7c73792e96407f
tree       8a1989fcfd9b1233d8aeed734e0e1e66147eb22b       /arch/arm/lib/memzero.S
parent     296630c9c8c6ff5f116b417fafa6067d4c7a786e
ARM: 8223/1: Add unwinding support for __memzero function
The __memzero function never had unwinding annotations added. Currently, when __memzero accesses an invalid pointer, the backtrace shown will stop at __memzero or at some completely unrelated function. Add unwinding annotations in the hope of getting a more useful backtrace in the following cases:

1. die on accessing an invalid pointer in __memzero
2. kprobe trapped at any instruction within __memzero
3. interrupted at any instruction within __memzero

Signed-off-by: Lin Yongting <linyongting@gmail.com>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
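For reference, below is a minimal sketch of the annotation pattern this patch applies; it is illustrative only and not part of the patch (the do_clear name and its body are hypothetical). The UNWIND() macro from asm/unwind.h emits its argument only when CONFIG_ARM_UNWIND is enabled; .fnstart and .fnend bracket the region covered by one unwind table entry, and .save records which registers that region pushed onto the stack so the unwinder can step past the frame:

    #include <linux/linkage.h>
    #include <asm/assembler.h>
    #include <asm/unwind.h>

    @ Illustrative routine only -- not part of this patch.
    ENTRY(do_clear)
    UNWIND( .fnstart )                      @ open an unwind region
            stmfd   sp!, {r4, lr}           @ push the registers we clobber
    UNWIND( .save {r4, lr} )                @ record what now sits on the stack
            @ ... body ...
            ldmfd   sp!, {r4, lr}           @ restore the saved registers
            ret     lr
    UNWIND( .fnend )                        @ close the region
    ENDPROC(do_clear)

In __memzero itself the patch splits the function into several such regions (a .fnend immediately followed by a new .fnstart around each register save), so that the .save annotation in each region matches exactly the registers that are on the stack at that point.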
Diffstat (limited to 'arch/arm/lib/memzero.S')
 -rw-r--r--   arch/arm/lib/memzero.S   12
 1 file changed, 12 insertions(+), 0 deletions(-)
diff --git a/arch/arm/lib/memzero.S b/arch/arm/lib/memzero.S
index 385ccb306fa2..0eded952e089 100644
--- a/arch/arm/lib/memzero.S
+++ b/arch/arm/lib/memzero.S
@@ -9,6 +9,7 @@
  */
 #include <linux/linkage.h>
 #include <asm/assembler.h>
+#include <asm/unwind.h>
 
 	.text
 	.align	5
@@ -18,6 +19,7 @@
  * mis-aligned by, and r1 is the number of bytes. If r1 < 4, then we
  * don't bother; we use byte stores instead.
  */
+UNWIND(	.fnstart			)
 1:	subs	r1, r1, #4		@ 1 do we have enough
 	blt	5f			@ 1 bytes to align with?
 	cmp	r3, #2			@ 1
@@ -47,6 +49,9 @@ ENTRY(__memzero)
  * use the LR
  */
 	str	lr, [sp, #-4]!		@ 1
+UNWIND(	.fnend				)
+UNWIND(	.fnstart			)
+UNWIND(	.save	{lr}			)
 	mov	ip, r2			@ 1
 	mov	lr, r2			@ 1
 
@@ -66,6 +71,7 @@ ENTRY(__memzero)
 	tst	r1, #16			@ 1 16 bytes or more?
 	stmneia	r0!, {r2, r3, ip, lr}	@ 4
 	ldr	lr, [sp], #4		@ 1
+UNWIND(	.fnend				)
 
 #else
 
@@ -75,6 +81,9 @@ ENTRY(__memzero)
  */
 
 	stmfd	sp!, {r4-r7, lr}
+UNWIND(	.fnend				)
+UNWIND(	.fnstart			)
+UNWIND(	.save	{r4-r7, lr}		)
 	mov	r4, r2
 	mov	r5, r2
 	mov	r6, r2
@@ -105,9 +114,11 @@ ENTRY(__memzero)
 	tst	r1, #16
 	stmneia	r0!, {r4-r7}
 	ldmfd	sp!, {r4-r7, lr}
+UNWIND(	.fnend				)
 
 #endif
 
+UNWIND(	.fnstart			)
 4:	tst	r1, #8			@ 1 8 bytes or more?
 	stmneia	r0!, {r2, r3}		@ 2
 	tst	r1, #4			@ 1 4 bytes or more?
@@ -122,4 +133,5 @@ ENTRY(__memzero)
 	tst	r1, #1			@ 1 a byte left over
 	strneb	r2, [r0], #1		@ 1
 	ret	lr			@ 1
+UNWIND(	.fnend				)
 ENDPROC(__memzero)