Diffstat (limited to 'arch/arm/lib/udivdi3.c')
-rw-r--r--  arch/arm/lib/udivdi3.c  222
1 file changed, 0 insertions, 222 deletions
diff --git a/arch/arm/lib/udivdi3.c b/arch/arm/lib/udivdi3.c
deleted file mode 100644
index e343be4c6642..000000000000
--- a/arch/arm/lib/udivdi3.c
+++ /dev/null
@@ -1,222 +0,0 @@
-/* More subroutines needed by GCC output code on some machines. */
-/* Compile this one with gcc. */
-/* Copyright (C) 1989, 92-98, 1999 Free Software Foundation, Inc.
-
-This file is part of GNU CC.
-
-GNU CC is free software; you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
-any later version.
-
-GNU CC is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with GNU CC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
-
-/* As a special exception, if you link this library with other files,
-   some of which are compiled with GCC, to produce an executable,
-   this library does not by itself cause the resulting executable
-   to be covered by the GNU General Public License.
-   This exception does not however invalidate any other reasons why
-   the executable file might be covered by the GNU General Public License.
- */
-/* support functions required by the kernel. based on code from gcc-2.95.3 */
-/* I Molton 29/07/01 */
-
-#include "gcclib.h"
-#include "longlong.h"
-
-static const u8 __clz_tab[] = {
-	0, 1, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5,
-	5, 5, 5, 5, 5, 5, 5, 5,
-	6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
-	6, 6, 6, 6, 6, 6, 6, 6,
-	7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
-	7, 7, 7, 7, 7, 7, 7, 7,
-	7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
-	7, 7, 7, 7, 7, 7, 7, 7,
-	8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
-	8, 8, 8, 8, 8, 8, 8, 8,
-	8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
-	8, 8, 8, 8, 8, 8, 8, 8,
-	8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
-	8, 8, 8, 8, 8, 8, 8, 8,
-	8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
-	8, 8, 8, 8, 8, 8, 8, 8,
-};
-
-u64 __udivmoddi4(u64 n, u64 d, u64 * rp)
-{
-	DIunion ww;
-	DIunion nn, dd;
-	DIunion rr;
-	u32 d0, d1, n0, n1, n2;
-	u32 q0, q1;
-	u32 b, bm;
-
-	nn.ll = n;
-	dd.ll = d;
-
-	d0 = dd.s.low;
-	d1 = dd.s.high;
-	n0 = nn.s.low;
-	n1 = nn.s.high;
-
-	if (d1 == 0) {
-		if (d0 > n1) {
-			/* 0q = nn / 0D */
-
-			count_leading_zeros(bm, d0);
-
-			if (bm != 0) {
-				/* Normalize, i.e. make the most significant bit of the
-				   denominator set. */
-
-				d0 = d0 << bm;
-				n1 = (n1 << bm) | (n0 >> (SI_TYPE_SIZE - bm));
-				n0 = n0 << bm;
-			}
-
-			udiv_qrnnd(q0, n0, n1, n0, d0);
-			q1 = 0;
-
-			/* Remainder in n0 >> bm. */
-		} else {
-			/* qq = NN / 0d */
-
-			if (d0 == 0)
-				d0 = 1 / d0;	/* Divide intentionally by zero. */
-
-			count_leading_zeros(bm, d0);
-
-			if (bm == 0) {
-				/* From (n1 >= d0) /\ (the most significant bit of d0 is set),
-				   conclude (the most significant bit of n1 is set) /\ (the
-				   leading quotient digit q1 = 1).
-
-				   This special case is necessary, not an optimization.
-				   (Shifts counts of SI_TYPE_SIZE are undefined.) */
-
-				n1 -= d0;
-				q1 = 1;
-			} else {
-				/* Normalize. */
-
-				b = SI_TYPE_SIZE - bm;
-
-				d0 = d0 << bm;
-				n2 = n1 >> b;
-				n1 = (n1 << bm) | (n0 >> b);
-				n0 = n0 << bm;
-
-				udiv_qrnnd(q1, n1, n2, n1, d0);
-			}
-
-			/* n1 != d0... */
-
-			udiv_qrnnd(q0, n0, n1, n0, d0);
-
-			/* Remainder in n0 >> bm. */
-		}
-
-		if (rp != 0) {
-			rr.s.low = n0 >> bm;
-			rr.s.high = 0;
-			*rp = rr.ll;
-		}
-	} else {
-		if (d1 > n1) {
-			/* 00 = nn / DD */
-
-			q0 = 0;
-			q1 = 0;
-
-			/* Remainder in n1n0. */
-			if (rp != 0) {
-				rr.s.low = n0;
-				rr.s.high = n1;
-				*rp = rr.ll;
-			}
-		} else {
-			/* 0q = NN / dd */
-
-			count_leading_zeros(bm, d1);
-			if (bm == 0) {
-				/* From (n1 >= d1) /\ (the most significant bit of d1 is set),
-				   conclude (the most significant bit of n1 is set) /\ (the
-				   quotient digit q0 = 0 or 1).
-
-				   This special case is necessary, not an optimization. */
-
-				/* The condition on the next line takes advantage of that
-				   n1 >= d1 (true due to program flow). */
-				if (n1 > d1 || n0 >= d0) {
-					q0 = 1;
-					sub_ddmmss(n1, n0, n1, n0, d1, d0);
-				} else
-					q0 = 0;
-
-				q1 = 0;
-
-				if (rp != 0) {
-					rr.s.low = n0;
-					rr.s.high = n1;
-					*rp = rr.ll;
-				}
-			} else {
-				u32 m1, m0;
-				/* Normalize. */
-
-				b = SI_TYPE_SIZE - bm;
-
-				d1 = (d1 << bm) | (d0 >> b);
-				d0 = d0 << bm;
-				n2 = n1 >> b;
-				n1 = (n1 << bm) | (n0 >> b);
-				n0 = n0 << bm;
-
-				udiv_qrnnd(q0, n1, n2, n1, d1);
-				umul_ppmm(m1, m0, q0, d0);
-
-				if (m1 > n1 || (m1 == n1 && m0 > n0)) {
-					q0--;
-					sub_ddmmss(m1, m0, m1, m0, d1, d0);
-				}
-
-				q1 = 0;
-
-				/* Remainder in (n1n0 - m1m0) >> bm. */
-				if (rp != 0) {
-					sub_ddmmss(n1, n0, n1, n0, m1, m0);
-					rr.s.low = (n1 << b) | (n0 >> bm);
-					rr.s.high = n1 >> bm;
-					*rp = rr.ll;
-				}
-			}
-		}
-	}
-
-	ww.s.low = q0;
-	ww.s.high = q1;
-	return ww.ll;
-}
-
-u64 __udivdi3(u64 n, u64 d)
-{
-	return __udivmoddi4(n, d, (u64 *) 0);
-}
-
-u64 __umoddi3(u64 u, u64 v)
-{
-	u64 w;
-
-	(void)__udivmoddi4(u, v, &w);
-
-	return w;
-}
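
The three routines removed above are the libgcc-style helpers that gcc, under the old ARM ABI this file targeted, calls whenever C code applies '/' or '%' to 64-bit unsigned operands: __udivdi3 returns the quotient, __umoddi3 the remainder, and both are thin wrappers around __udivmoddi4, which computes the quotient and optionally stores the remainder through rp. The following is a minimal host-side sketch, not part of the patch, that exercises that contract; it assumes the helpers come from the toolchain's libgcc (or from code like the file above) and uses uint64_t in place of the kernel's u64.

/* Minimal sketch (illustration only, not from the patch): on a 32-bit
 * pre-EABI ARM build, gcc lowers the '/' and '%' below to calls to
 * __udivdi3 and __umoddi3; on a 64-bit host they compile to native
 * instructions.  Either way the result must satisfy n == q * d + r
 * with r < d, which is the invariant __udivmoddi4 maintains.
 */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t n = 0x123456789abcdef0ULL;	/* dividend */
	uint64_t d = 0x00000000fedcba98ULL;	/* divisor */

	uint64_t q = n / d;	/* -> __udivdi3 */
	uint64_t r = n % d;	/* -> __umoddi3 */

	printf("q=%llu r=%llu ok=%d\n",
	       (unsigned long long)q, (unsigned long long)r,
	       (int)(q * d + r == n && r < d));
	return 0;
}

In-kernel code is not expected to rely on these helpers for 64-bit division; the usual route is do_div() and the div64 helpers, which is presumably why the file could be removed without a replacement.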