path: root/arch/xtensa/lib/memset.S
author		Max Filippov <jcmvbkbc@gmail.com>	2017-12-10 00:18:47 -0500
committer	Max Filippov <jcmvbkbc@gmail.com>	2017-12-10 17:48:53 -0500
commit		0013aceb307482ba83a5b6a29f6ba1791be0d32b (patch)
tree		63f0312a68e62d0f52212ce7bdab9d75e4ee6ead /arch/xtensa/lib/memset.S
parent		2da03d4114b2587f0e8e45f4862074e34daee64e (diff)
xtensa: clean up fixups in assembly code
Remove the duplicate definitions of EX() and the similar TRY/CATCH and SRC/DST macros from the assembly sources and put a single definition into asm/asmmacro.h.

Signed-off-by: Max Filippov <jcmvbkbc@gmail.com>
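The matching change to asm/asmmacro.h is outside this file's diff. Judging from the per-file macro removed below and the new call sites of the form EX(10f) s32i a3, a5, 0, the shared definition plausibly becomes a handler-only macro that records the address of the instruction following the invocation; the version below is a sketch under that assumption, not the verbatim header contents (the local label number 97 is illustrative):

	/* Assumed shape of the consolidated macro in asm/asmmacro.h:
	 * emit an __ex_table entry pairing the address of the next
	 * instruction with its fixup handler.
	 */
	#define EX(handler)				\
		.section __ex_table, "a";		\
		.word	97f, handler;			\
		.previous;				\
	97:

Each call site then names its fixup label directly (10f above refers to the local label 10: that replaces memset_fixup at the end of this file), so individual assembly sources no longer need to carry their own copy of the macro.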
Diffstat (limited to 'arch/xtensa/lib/memset.S')
-rw-r--r--	arch/xtensa/lib/memset.S	36
1 file changed, 14 insertions(+), 22 deletions(-)
diff --git a/arch/xtensa/lib/memset.S b/arch/xtensa/lib/memset.S
index 10b8c400f175..7a724edaf4f1 100644
--- a/arch/xtensa/lib/memset.S
+++ b/arch/xtensa/lib/memset.S
@@ -12,6 +12,7 @@
  */

 #include <variant/core.h>
+#include <asm/asmmacro.h>

 /*
  * void *memset(void *dst, int c, size_t length)
@@ -28,15 +29,6 @@
  * the alignment labels).
  */

-/* Load or store instructions that may cause exceptions use the EX macro. */
-
-#define EX(insn,reg1,reg2,offset,handler)	\
-9:	insn	reg1, reg2, offset;		\
-	.section __ex_table, "a";		\
-	.word	9b, handler;			\
-	.previous
-
-
 .text
 .align	4
 .global	memset
@@ -73,10 +65,10 @@ memset:
 	add	a6, a6, a5	# a6 = end of last 16B chunk
 #endif /* !XCHAL_HAVE_LOOPS */
 .Loop1:
-	EX(s32i, a3, a5,  0, memset_fixup)
-	EX(s32i, a3, a5,  4, memset_fixup)
-	EX(s32i, a3, a5,  8, memset_fixup)
-	EX(s32i, a3, a5, 12, memset_fixup)
+EX(10f) s32i	a3, a5,  0
+EX(10f) s32i	a3, a5,  4
+EX(10f) s32i	a3, a5,  8
+EX(10f) s32i	a3, a5, 12
 	addi	a5, a5, 16
 #if !XCHAL_HAVE_LOOPS
 	blt	a5, a6, .Loop1
@@ -84,23 +76,23 @@ memset:
 .Loop1done:
 	bbci.l	a4, 3, .L2
 	# set 8 bytes
-	EX(s32i, a3, a5,  0, memset_fixup)
-	EX(s32i, a3, a5,  4, memset_fixup)
+EX(10f) s32i	a3, a5,  0
+EX(10f) s32i	a3, a5,  4
 	addi	a5, a5, 8
 .L2:
 	bbci.l	a4, 2, .L3
 	# set 4 bytes
-	EX(s32i, a3, a5,  0, memset_fixup)
+EX(10f) s32i	a3, a5,  0
 	addi	a5, a5, 4
 .L3:
 	bbci.l	a4, 1, .L4
 	# set 2 bytes
-	EX(s16i, a3, a5,  0, memset_fixup)
+EX(10f) s16i	a3, a5,  0
 	addi	a5, a5, 2
 .L4:
 	bbci.l	a4, 0, .L5
 	# set 1 byte
-	EX(s8i, a3, a5, 0, memset_fixup)
+EX(10f) s8i	a3, a5, 0
 .L5:
 .Lret1:
 	retw
@@ -114,7 +106,7 @@ memset:
 	bbci.l	a5, 0, .L20	# branch if dst alignment half-aligned
 	# dst is only byte aligned
 	# set 1 byte
-	EX(s8i, a3, a5, 0, memset_fixup)
+EX(10f) s8i	a3, a5, 0
 	addi	a5, a5, 1
 	addi	a4, a4, -1
 	# now retest if dst aligned
@@ -122,7 +114,7 @@ memset:
 .L20:
 	# dst half-aligned
 	# set 2 bytes
-	EX(s16i, a3, a5, 0, memset_fixup)
+EX(10f) s16i	a3, a5, 0
 	addi	a5, a5, 2
 	addi	a4, a4, -2
 	j	.L0	# dst is now aligned, return to main algorithm
@@ -141,7 +133,7 @@ memset:
 	add	a6, a5, a4	# a6 = ending address
 #endif /* !XCHAL_HAVE_LOOPS */
 .Lbyteloop:
-	EX(s8i, a3, a5, 0, memset_fixup)
+EX(10f) s8i	a3, a5, 0
 	addi	a5, a5, 1
 #if !XCHAL_HAVE_LOOPS
 	blt	a5, a6, .Lbyteloop
@@ -155,6 +147,6 @@ memset:

 /* We return zero if a failure occurred. */

-memset_fixup:
+10:
 	movi	a2, 0
 	retw