Diffstat (limited to 'arch/arm/lib/udivdi3.c')
-rw-r--r-- | arch/arm/lib/udivdi3.c | 372
1 file changed, 176 insertions, 196 deletions
diff --git a/arch/arm/lib/udivdi3.c b/arch/arm/lib/udivdi3.c
index df1d5ef62bc7..e343be4c6642 100644
--- a/arch/arm/lib/udivdi3.c
+++ b/arch/arm/lib/udivdi3.c
@@ -32,211 +32,191 @@ Boston, MA 02111-1307, USA. */
#include "gcclib.h"
#include "longlong.h"

static const u8 __clz_tab[] = {
	0, 1, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5,
	5, 5, 5, 5, 5, 5, 5, 5,
	6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
	6, 6, 6, 6, 6, 6, 6, 6,
	7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
	7, 7, 7, 7, 7, 7, 7, 7,
	7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
	7, 7, 7, 7, 7, 7, 7, 7,
	8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
	8, 8, 8, 8, 8, 8, 8, 8,
	8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
	8, 8, 8, 8, 8, 8, 8, 8,
	8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
	8, 8, 8, 8, 8, 8, 8, 8,
	8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
	8, 8, 8, 8, 8, 8, 8, 8,
};

u64 __udivmoddi4(u64 n, u64 d, u64 * rp)
{
	DIunion ww;
	DIunion nn, dd;
	DIunion rr;
	u32 d0, d1, n0, n1, n2;
	u32 q0, q1;
	u32 b, bm;

	nn.ll = n;
	dd.ll = d;

	d0 = dd.s.low;
	d1 = dd.s.high;
	n0 = nn.s.low;
	n1 = nn.s.high;

	if (d1 == 0) {
		if (d0 > n1) {
			/* 0q = nn / 0D */

			count_leading_zeros(bm, d0);

			if (bm != 0) {
				/* Normalize, i.e. make the most significant bit of the
				   denominator set. */

				d0 = d0 << bm;
				n1 = (n1 << bm) | (n0 >> (SI_TYPE_SIZE - bm));
				n0 = n0 << bm;
			}

			udiv_qrnnd(q0, n0, n1, n0, d0);
			q1 = 0;

			/* Remainder in n0 >> bm. */
		} else {
			/* qq = NN / 0d */

			if (d0 == 0)
				d0 = 1 / d0;	/* Divide intentionally by zero. */

			count_leading_zeros(bm, d0);

			if (bm == 0) {
				/* From (n1 >= d0) /\ (the most significant bit of d0 is set),
				   conclude (the most significant bit of n1 is set) /\ (the
				   leading quotient digit q1 = 1).

				   This special case is necessary, not an optimization.
				   (Shifts counts of SI_TYPE_SIZE are undefined.) */

				n1 -= d0;
				q1 = 1;
			} else {
				/* Normalize. */

				b = SI_TYPE_SIZE - bm;

				d0 = d0 << bm;
				n2 = n1 >> b;
				n1 = (n1 << bm) | (n0 >> b);
				n0 = n0 << bm;

				udiv_qrnnd(q1, n1, n2, n1, d0);
			}

			/* n1 != d0... */

			udiv_qrnnd(q0, n0, n1, n0, d0);

			/* Remainder in n0 >> bm. */
		}

		if (rp != 0) {
			rr.s.low = n0 >> bm;
			rr.s.high = 0;
			*rp = rr.ll;
		}
	} else {
		if (d1 > n1) {
			/* 00 = nn / DD */

			q0 = 0;
			q1 = 0;

			/* Remainder in n1n0. */
			if (rp != 0) {
				rr.s.low = n0;
				rr.s.high = n1;
				*rp = rr.ll;
			}
		} else {
			/* 0q = NN / dd */

			count_leading_zeros(bm, d1);
			if (bm == 0) {
				/* From (n1 >= d1) /\ (the most significant bit of d1 is set),
				   conclude (the most significant bit of n1 is set) /\ (the
				   quotient digit q0 = 0 or 1).

				   This special case is necessary, not an optimization. */

				/* The condition on the next line takes advantage of that
				   n1 >= d1 (true due to program flow). */
				if (n1 > d1 || n0 >= d0) {
					q0 = 1;
					sub_ddmmss(n1, n0, n1, n0, d1, d0);
				} else
					q0 = 0;

				q1 = 0;

				if (rp != 0) {
					rr.s.low = n0;
					rr.s.high = n1;
					*rp = rr.ll;
				}
			} else {
				u32 m1, m0;
				/* Normalize. */

				b = SI_TYPE_SIZE - bm;

				d1 = (d1 << bm) | (d0 >> b);
				d0 = d0 << bm;
				n2 = n1 >> b;
				n1 = (n1 << bm) | (n0 >> b);
				n0 = n0 << bm;

				udiv_qrnnd(q0, n1, n2, n1, d1);
				umul_ppmm(m1, m0, q0, d0);

				if (m1 > n1 || (m1 == n1 && m0 > n0)) {
					q0--;
					sub_ddmmss(m1, m0, m1, m0, d1, d0);
				}

				q1 = 0;

				/* Remainder in (n1n0 - m1m0) >> bm. */
				if (rp != 0) {
					sub_ddmmss(n1, n0, n1, n0, m1, m0);
					rr.s.low = (n1 << b) | (n0 >> bm);
					rr.s.high = n1 >> bm;
					*rp = rr.ll;
				}
			}
		}
	}

	ww.s.low = q0;
	ww.s.high = q1;
	return ww.ll;
}

u64 __udivdi3(u64 n, u64 d)
{
	return __udivmoddi4(n, d, (u64 *) 0);
}

u64 __umoddi3(u64 u, u64 v)
{
	u64 w;

	(void)__udivmoddi4(u, v, &w);

	return w;
}
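
For reference, the __clz_tab[] table at the top of the hunk maps a byte value to the position of its most significant set bit plus one (with __clz_tab[0] == 0). The sketch below is illustrative only and is not part of this patch: it shows one way such a table can back a software leading-zero count on a 32-bit word. The real count_leading_zeros() used above comes from longlong.h and may be implemented differently (for example with the ARM clz instruction on cores that have it); the helper name clz32_by_table is invented for the example, and it assumes the u8/u32 typedefs from gcclib.h.

/* Illustrative sketch only (not in the patch): a table-driven leading-zero
 * count.  Assumes u32 from gcclib.h and the __clz_tab[] defined above.
 * Returns 32 for x == 0.
 */
static unsigned int clz32_by_table(u32 x)
{
	unsigned int shift;

	/* Find the highest non-zero byte of x... */
	if (x >= (1U << 24))
		shift = 24;
	else if (x >= (1U << 16))
		shift = 16;
	else if (x >= (1U << 8))
		shift = 8;
	else
		shift = 0;

	/* ...then let the table give the MSB position within that byte. */
	return 32 - (__clz_tab[x >> shift] + shift);
}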
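The three helpers fit together as a quotient/remainder pair: __udivmoddi4() computes both, __udivdi3() keeps only the quotient, and __umoddi3() keeps only the remainder. Below is a hypothetical host-side check, not part of the patch, that exercises that relationship; it assumes the u64 typedef from gcclib.h and an environment where the helpers can be linked and called directly.

#include <assert.h>

/* Hypothetical self-test (not in the patch).  The prototypes mirror the
 * definitions above.  For any v != 0 the results must satisfy
 * u == q * v + r with r < v, and the single-purpose wrappers must agree
 * with __udivmoddi4().
 */
extern u64 __udivmoddi4(u64 n, u64 d, u64 * rp);
extern u64 __udivdi3(u64 n, u64 d);
extern u64 __umoddi3(u64 u, u64 v);

static void check_udiv64(u64 u, u64 v)
{
	u64 r;
	u64 q = __udivmoddi4(u, v, &r);

	assert(q == __udivdi3(u, v));
	assert(r == __umoddi3(u, v));
	assert(u == q * v + r);
	assert(r < v);
}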