/*
 * Copyright (c) 2006-2018, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 */
/*	$NetBSD: divsi3.S,v 1.5 2005/02/26 22:58:56 perry Exp $	*/

/*
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

/*
 * stack is aligned as there's a possibility of branching to L_overflow
 * which makes a C call
 */
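/*
 * These are the software integer divide/modulo helpers that GCC emits
 * calls to on ARM cores without a hardware divide instruction.  As a
 * rough sketch of the contract they are assumed to satisfy (a C
 * equivalent, not the implementation):
 *
 *	unsigned __udivsi3(unsigned n, unsigned d) { return n / d; }
 *	unsigned __umodsi3(unsigned n, unsigned d) { return n % d; }
 *	int      __divsi3 (int n, int d)           { return n / d; }
 *	int      __modsi3 (int n, int d)           { return n % d; }
 *
 * A zero divisor ends up in .L_overflow below and simply returns
 * 0xFFFFFFFF (-1) instead of trapping.  The "sub sp, #4" in the two
 * wrappers presumably keeps the stack 8-byte aligned (as the AAPCS
 * requires at external interfaces) for the C call the comment above
 * anticipates, since pushing lr alone leaves it only 4-byte aligned.
 */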
	.text
	.align	0
	.globl	__umodsi3
	.type	__umodsi3, function
__umodsi3:
	stmfd	sp!, {lr}
	sub	sp, sp, #4	/* align stack */
	bl	.L_udivide
	add	sp, sp, #4	/* unalign stack */
	mov	r0, r1
	ldmfd	sp!, {pc}

	.text
	.align	0
	.globl	__modsi3
	.type	__modsi3, function
__modsi3:
	stmfd	sp!, {lr}
	sub	sp, sp, #4	/* align stack */
	bl	.L_divide
	add	sp, sp, #4	/* unalign stack */
	mov	r0, r1
	ldmfd	sp!, {pc}

.L_overflow:
	/* XXX should cause a fatal error */
	mvn	r0, #0
	mov	pc, lr

	.text
	.align	0
	.globl	__udivsi3
	.type	__udivsi3, function
__udivsi3:
.L_udivide:				/* r0 = r0 / r1; r1 = r0 % r1 */
	eor     r0, r1, r0
	eor     r1, r0, r1
	eor     r0, r1, r0
					/* r0 = r1 / r0; r1 = r1 % r0 */
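	/*
	 * The three eors above are the classic XOR swap: they exchange
	 * r0 and r1 without needing a scratch register, roughly
	 * "tmp = r0; r0 = r1; r1 = tmp;" done with exclusive-ors, so
	 * from here on r0 holds the divisor and r1 the dividend.
	 */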
	cmp	r0, #1
	bcc	.L_overflow
	beq	.L_divide_l0
	mov	ip, #0
	movs	r1, r1
	bpl	.L_divide_l1
	orr	ip, ip, #0x20000000	/* ip bit 0x20000000 = -ve r1 */
	movs	r1, r1, lsr #1
	orrcs	ip, ip, #0x10000000	/* ip bit 0x10000000 = bit 0 of r1 */
	b	.L_divide_l1
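	/*
	 * Unsigned entry with the dividend's top bit set: the dividend is
	 * halved so the plain compare ladder below stays safe, flag
	 * 0x20000000 records that this happened, and the shifted-out low
	 * bit is parked in flag 0x10000000.  The division then runs on the
	 * halved value and .L_udivide_l1 at the end doubles the quotient
	 * and remainder and re-inserts the saved bit.
	 */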

.L_divide_l0:				/* r0 == 1 */
	mov	r0, r1
	mov	r1, #0
	mov	pc, lr

	.text
	.align	0
	.globl	__divsi3
	.type	__divsi3, function
__divsi3:
.L_divide:				/* r0 = r0 / r1; r1 = r0 % r1 */
	eor     r0, r1, r0
	eor     r1, r0, r1
	eor     r0, r1, r0
					/* r0 = r1 / r0; r1 = r1 % r0 */
	cmp	r0, #1
	bcc	.L_overflow
	beq	.L_divide_l0
	ands	ip, r0, #0x80000000
	rsbmi	r0, r0, #0
	ands	r2, r1, #0x80000000
	eor	ip, ip, r2
	rsbmi	r1, r1, #0
	orr	ip, r2, ip, lsr #1	/* ip bit 0x40000000 = -ve division */
					/* ip bit 0x80000000 = -ve remainder */
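	/*
	 * Signed entry: both operands are made positive and the two flag
	 * bits in ip remember how to fix the signs afterwards, matching
	 * C's truncating division.  A rough sketch (udiv/umod are just
	 * pseudo-helpers for illustration, not real symbols):
	 *
	 *	q = udiv(|dividend|, |divisor|);
	 *	r = umod(|dividend|, |divisor|);
	 *	if (dividend < 0)             r = -r;	// ip bit 0x80000000
	 *	if ((dividend ^ divisor) < 0) q = -q;	// ip bit 0x40000000
	 */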

.L_divide_l1:
	mov	r2, #1
	mov	r3, #0
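	/*
	 * Register roles from here on: r0 = divisor, r1 = running
	 * remainder (initially the dividend), r2 = 1, r3 = quotient
	 * accumulator.  What follows is a fully unrolled shift-and-subtract
	 * (restoring) division; ignoring the shift-overflow issue the code
	 * takes care of, it behaves roughly like this C loop:
	 *
	 *	for (k = 31; k >= 0; k--) {
	 *		if (r1 >= (r0 << k)) {
	 *			r1 -= r0 << k;
	 *			r3 += r2 << k;	// i.e. set quotient bit k
	 *		}
	 *	}
	 *
	 * Rather than running all 32 rungs, the code first locates the
	 * highest useful shift and jumps into the ladder at that rung.
	 */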

	/*
	 * If the highest bit of the dividend is set, we have to be
	 * careful when shifting the divisor. Test this.
	 */
	movs	r1, r1
	bpl	.L_old_code

	/*
	 * At this point, the highest bit of r1 is known to be set.
	 * We abuse this below in the tst instructions.
	 */
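	/*
	 * Since bit 31 of r1 is set, "tst r1, r0, lsl #k" sets the N flag
	 * exactly when bit 31 of (r0 << k) is set, i.e. when bit (31 - k)
	 * of the divisor is set.  The chain below therefore finds the
	 * divisor's most significant set bit and enters the ladder at the
	 * matching rung, without any compare that a shifted divisor
	 * wrapping past bit 31 could fool.
	 */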
	tst	r1, r0 /*, lsl #0 */
	bmi	.L_divide_b1
	tst	r1, r0, lsl #1
	bmi	.L_divide_b2
	tst	r1, r0, lsl #2
	bmi	.L_divide_b3
	tst	r1, r0, lsl #3
	bmi	.L_divide_b4
	tst	r1, r0, lsl #4
	bmi	.L_divide_b5
	tst	r1, r0, lsl #5
	bmi	.L_divide_b6
	tst	r1, r0, lsl #6
	bmi	.L_divide_b7
	tst	r1, r0, lsl #7
	bmi	.L_divide_b8
	tst	r1, r0, lsl #8
	bmi	.L_divide_b9
	tst	r1, r0, lsl #9
	bmi	.L_divide_b10
	tst	r1, r0, lsl #10
	bmi	.L_divide_b11
	tst	r1, r0, lsl #11
	bmi	.L_divide_b12
	tst	r1, r0, lsl #12
	bmi	.L_divide_b13
	tst	r1, r0, lsl #13
	bmi	.L_divide_b14
	tst	r1, r0, lsl #14
	bmi	.L_divide_b15
	tst	r1, r0, lsl #15
	bmi	.L_divide_b16
	tst	r1, r0, lsl #16
	bmi	.L_divide_b17
	tst	r1, r0, lsl #17
	bmi	.L_divide_b18
	tst	r1, r0, lsl #18
	bmi	.L_divide_b19
	tst	r1, r0, lsl #19
	bmi	.L_divide_b20
	tst	r1, r0, lsl #20
	bmi	.L_divide_b21
	tst	r1, r0, lsl #21
	bmi	.L_divide_b22
	tst	r1, r0, lsl #22
	bmi	.L_divide_b23
	tst	r1, r0, lsl #23
	bmi	.L_divide_b24
	tst	r1, r0, lsl #24
	bmi	.L_divide_b25
	tst	r1, r0, lsl #25
	bmi	.L_divide_b26
	tst	r1, r0, lsl #26
	bmi	.L_divide_b27
	tst	r1, r0, lsl #27
	bmi	.L_divide_b28
	tst	r1, r0, lsl #28
	bmi	.L_divide_b29
	tst	r1, r0, lsl #29
	bmi	.L_divide_b30
	tst	r1, r0, lsl #30
	bmi	.L_divide_b31
/*
 * instead of:
 *	tst	r1, r0, lsl #31
 *	bmi	.L_divide_b32
 * If control falls through this far, only bit 0 of the divisor can
 * still be set, so the conditional test is pointless and an
 * unconditional branch does the same job.
 */
	b	.L_divide_b32

.L_old_code:
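	/*
	 * The dividend's top bit is clear here, so straightforward
	 * compares are safe: because r1 < 0x80000000, the scan below takes
	 * its bcc no later than the first shift that puts the divisor's
	 * top set bit into bit 31, before anything can wrap, and the
	 * subtract ladder is entered one rung below the failing compare.
	 */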
	cmp	r1, r0
	bcc	.L_divide_b0
	cmp	r1, r0, lsl #1
	bcc	.L_divide_b1
	cmp	r1, r0, lsl #2
	bcc	.L_divide_b2
	cmp	r1, r0, lsl #3
	bcc	.L_divide_b3
	cmp	r1, r0, lsl #4
	bcc	.L_divide_b4
	cmp	r1, r0, lsl #5
	bcc	.L_divide_b5
	cmp	r1, r0, lsl #6
	bcc	.L_divide_b6
	cmp	r1, r0, lsl #7
	bcc	.L_divide_b7
	cmp	r1, r0, lsl #8
	bcc	.L_divide_b8
	cmp	r1, r0, lsl #9
	bcc	.L_divide_b9
	cmp	r1, r0, lsl #10
	bcc	.L_divide_b10
	cmp	r1, r0, lsl #11
	bcc	.L_divide_b11
	cmp	r1, r0, lsl #12
	bcc	.L_divide_b12
	cmp	r1, r0, lsl #13
	bcc	.L_divide_b13
	cmp	r1, r0, lsl #14
	bcc	.L_divide_b14
	cmp	r1, r0, lsl #15
	bcc	.L_divide_b15
	cmp	r1, r0, lsl #16
	bcc	.L_divide_b16
	cmp	r1, r0, lsl #17
	bcc	.L_divide_b17
	cmp	r1, r0, lsl #18
	bcc	.L_divide_b18
	cmp	r1, r0, lsl #19
	bcc	.L_divide_b19
	cmp	r1, r0, lsl #20
	bcc	.L_divide_b20
	cmp	r1, r0, lsl #21
	bcc	.L_divide_b21
	cmp	r1, r0, lsl #22
	bcc	.L_divide_b22
	cmp	r1, r0, lsl #23
	bcc	.L_divide_b23
	cmp	r1, r0, lsl #24
	bcc	.L_divide_b24
	cmp	r1, r0, lsl #25
	bcc	.L_divide_b25
	cmp	r1, r0, lsl #26
	bcc	.L_divide_b26
	cmp	r1, r0, lsl #27
	bcc	.L_divide_b27
	cmp	r1, r0, lsl #28
	bcc	.L_divide_b28
	cmp	r1, r0, lsl #29
	bcc	.L_divide_b29
	cmp	r1, r0, lsl #30
	bcc	.L_divide_b30
.L_divide_b32:
	cmp	r1, r0, lsl #31
	subhs	r1, r1, r0, lsl #31
	addhs	r3, r3, r2, lsl #31
.L_divide_b31:
	cmp	r1, r0, lsl #30
	subhs	r1, r1, r0, lsl #30
	addhs	r3, r3, r2, lsl #30
.L_divide_b30:
	cmp	r1, r0, lsl #29
	subhs	r1, r1, r0, lsl #29
	addhs	r3, r3, r2, lsl #29
.L_divide_b29:
	cmp	r1, r0, lsl #28
	subhs	r1, r1, r0, lsl #28
	addhs	r3, r3, r2, lsl #28
.L_divide_b28:
	cmp	r1, r0, lsl #27
	subhs	r1, r1, r0, lsl #27
	addhs	r3, r3, r2, lsl #27
.L_divide_b27:
	cmp	r1, r0, lsl #26
	subhs	r1, r1, r0, lsl #26
	addhs	r3, r3, r2, lsl #26
.L_divide_b26:
	cmp	r1, r0, lsl #25
	subhs	r1, r1, r0, lsl #25
	addhs	r3, r3, r2, lsl #25
.L_divide_b25:
	cmp	r1, r0, lsl #24
	subhs	r1, r1, r0, lsl #24
	addhs	r3, r3, r2, lsl #24
.L_divide_b24:
	cmp	r1, r0, lsl #23
	subhs	r1, r1, r0, lsl #23
	addhs	r3, r3, r2, lsl #23
.L_divide_b23:
	cmp	r1, r0, lsl #22
	subhs	r1, r1, r0, lsl #22
	addhs	r3, r3, r2, lsl #22
.L_divide_b22:
	cmp	r1, r0, lsl #21
	subhs	r1, r1, r0, lsl #21
	addhs	r3, r3, r2, lsl #21
.L_divide_b21:
	cmp	r1, r0, lsl #20
	subhs	r1, r1, r0, lsl #20
	addhs	r3, r3, r2, lsl #20
.L_divide_b20:
	cmp	r1, r0, lsl #19
	subhs	r1, r1, r0, lsl #19
	addhs	r3, r3, r2, lsl #19
.L_divide_b19:
	cmp	r1, r0, lsl #18
	subhs	r1, r1, r0, lsl #18
	addhs	r3, r3, r2, lsl #18
.L_divide_b18:
	cmp	r1, r0, lsl #17
	subhs	r1, r1, r0, lsl #17
	addhs	r3, r3, r2, lsl #17
.L_divide_b17:
	cmp	r1, r0, lsl #16
	subhs	r1, r1, r0, lsl #16
	addhs	r3, r3, r2, lsl #16
.L_divide_b16:
	cmp	r1, r0, lsl #15
	subhs	r1, r1, r0, lsl #15
	addhs	r3, r3, r2, lsl #15
.L_divide_b15:
	cmp	r1, r0, lsl #14
	subhs	r1, r1, r0, lsl #14
	addhs	r3, r3, r2, lsl #14
.L_divide_b14:
	cmp	r1, r0, lsl #13
	subhs	r1, r1, r0, lsl #13
	addhs	r3, r3, r2, lsl #13
.L_divide_b13:
	cmp	r1, r0, lsl #12
	subhs	r1, r1, r0, lsl #12
	addhs	r3, r3, r2, lsl #12
.L_divide_b12:
	cmp	r1, r0, lsl #11
	subhs	r1, r1, r0, lsl #11
	addhs	r3, r3, r2, lsl #11
.L_divide_b11:
	cmp	r1, r0, lsl #10
	subhs	r1, r1, r0, lsl #10
	addhs	r3, r3, r2, lsl #10
.L_divide_b10:
	cmp	r1, r0, lsl #9
	subhs	r1, r1, r0, lsl #9
	addhs	r3, r3, r2, lsl #9
.L_divide_b9:
	cmp	r1, r0, lsl #8
	subhs	r1, r1, r0, lsl #8
	addhs	r3, r3, r2, lsl #8
.L_divide_b8:
	cmp	r1, r0, lsl #7
	subhs	r1, r1, r0, lsl #7
	addhs	r3, r3, r2, lsl #7
.L_divide_b7:
	cmp	r1, r0, lsl #6
	subhs	r1, r1, r0, lsl #6
	addhs	r3, r3, r2, lsl #6
.L_divide_b6:
	cmp	r1, r0, lsl #5
	subhs	r1, r1, r0, lsl #5
	addhs	r3, r3, r2, lsl #5
.L_divide_b5:
	cmp	r1, r0, lsl #4
	subhs	r1, r1, r0, lsl #4
	addhs	r3, r3, r2, lsl #4
.L_divide_b4:
	cmp	r1, r0, lsl #3
	subhs	r1, r1, r0, lsl #3
	addhs	r3, r3, r2, lsl #3
.L_divide_b3:
	cmp	r1, r0, lsl #2
	subhs	r1, r1, r0, lsl #2
	addhs	r3, r3, r2, lsl #2
.L_divide_b2:
	cmp	r1, r0, lsl #1
	subhs	r1, r1, r0, lsl #1
	addhs	r3, r3, r2, lsl #1
.L_divide_b1:
	cmp	r1, r0
	subhs	r1, r1, r0
	addhs	r3, r3, r2
.L_divide_b0:

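	/*
	 * Ladder done: r3 holds the quotient and r1 the remainder.  If the
	 * unsigned entry halved the dividend (flag 0x20000000), finish that
	 * in .L_udivide_l1; otherwise move the quotient into r0 and apply
	 * the sign flags collected in ip: bit 31 negates the remainder and
	 * bit 30 (tested after the shift left) negates the quotient.
	 */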
	tst	ip, #0x20000000
	bne	.L_udivide_l1
	mov	r0, r3
	cmp	ip, #0
	rsbmi	r1, r1, #0
	movs	ip, ip, lsl #1
	bicmi	r0, r0, #0x80000000	/* Fix in case we divided 0x80000000 */
	rsbmi	r0, r0, #0
	mov	pc, lr

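	/*
	 * Final step for the halved-dividend unsigned case: double the
	 * remainder and the quotient, re-insert the dividend's saved low
	 * bit (flag 0x10000000), and do the one compare/subtract step that
	 * the halving skipped.
	 */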
.L_udivide_l1:
	tst	ip, #0x10000000
	mov	r1, r1, lsl #1
	orrne	r1, r1, #1
	mov	r3, r3, lsl #1
	cmp	r1, r0
	subhs	r1, r1, r0
	addhs	r3, r3, r2
	mov	r0, r3
	mov	pc, lr