#include <linux/kernel.h>
#include <linux/sched.h>
#include <linux/types.h>
#include <asm/byteorder.h>

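/*
 * Note (not part of the original header): add_ssaaaa() and sub_ddmmss()
 * below follow the usual GMP/longlong.h convention, where a double-word
 * value is kept as a high:low pair of 32-bit USItype halves.  As far as
 * I can tell they compute, with the carry/borrow propagated through the
 * condition codes:
 *
 *	sh:sl = ah:al + bh:bl;		// add_ssaaaa
 *	sh:sl = ah:al - bh:bl;		// sub_ddmmss
 */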
#define add_ssaaaa(sh, sl, ah, al, bh, bl) \
	__asm__ ("addcc %r4,%5,%1\n\t" \
		 "addx %r2,%3,%0\n" \
		 : "=r" (sh), \
		   "=&r" (sl) \
		 : "%rJ" ((USItype)(ah)), \
		   "rI" ((USItype)(bh)), \
		   "%rJ" ((USItype)(al)), \
		   "rI" ((USItype)(bl)) \
		 : "cc")
#define sub_ddmmss(sh, sl, ah, al, bh, bl) \
	__asm__ ("subcc %r4,%5,%1\n\t" \
		 "subx %r2,%3,%0\n" \
		 : "=r" (sh), \
		   "=&r" (sl) \
		 : "rJ" ((USItype)(ah)), \
		   "rI" ((USItype)(bh)), \
		   "rJ" ((USItype)(al)), \
		   "rI" ((USItype)(bl)) \
		 : "cc")

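/*
 * Note (not part of the original header): umul_ppmm(w1, w0, u, v) is the
 * longlong.h-style 32x32->64 unsigned multiply, i.e. roughly
 *
 *	w1:w0 = (u64)u * (u64)v;
 *
 * It targets 32-bit SPARC parts without a hardware multiply, so the
 * product is built from 32 mulscc steps using the %y register.  The
 * sra/and prologue appears to compute a correction term (%2 when %3 is
 * negative) that the final add folds back into the high word, since
 * mulscc effectively treats the %3 operand as signed.
 */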
#define umul_ppmm(w1, w0, u, v) \
	__asm__ ("! Inlined umul_ppmm\n\t" \
		 "wr %%g0,%2,%%y ! SPARC has 0-3 delay insn after a wr\n\t" \
		 "sra %3,31,%%g2 ! Don't move this insn\n\t" \
		 "and %2,%%g2,%%g2 ! Don't move this insn\n\t" \
		 "andcc %%g0,0,%%g1 ! Don't move this insn\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,%3,%%g1\n\t" \
		 "mulscc %%g1,0,%%g1\n\t" \
		 "add %%g1,%%g2,%0\n\t" \
		 "rd %%y,%1\n" \
		 : "=r" (w1), \
		   "=r" (w0) \
		 : "%rI" ((USItype)(u)), \
		   "r" ((USItype)(v)) \
		 : "%g1", "%g2", "cc")

/* It's quite necessary to add this much assembler for the sparc.
   The default udiv_qrnnd (in C) is more than 10 times slower!  */
#define udiv_qrnnd(q, r, n1, n0, d) \
	__asm__ ("! Inlined udiv_qrnnd\n\t" \
		 "mov 32,%%g1\n\t" \
		 "subcc %1,%2,%%g0\n\t" \
		 "1: bcs 5f\n\t" \
		 "addxcc %0,%0,%0 ! shift n1n0 and a q-bit in lsb\n\t" \
		 "sub %1,%2,%1 ! this kills msb of n\n\t" \
		 "addx %1,%1,%1 ! so this can't give carry\n\t" \
		 "subcc %%g1,1,%%g1\n\t" \
		 "2: bne 1b\n\t" \
		 "subcc %1,%2,%%g0\n\t" \
		 "bcs 3f\n\t" \
		 "addxcc %0,%0,%0 ! shift n1n0 and a q-bit in lsb\n\t" \
		 "b 3f\n\t" \
		 "sub %1,%2,%1 ! this kills msb of n\n\t" \
		 "4: sub %1,%2,%1\n\t" \
		 "5: addxcc %1,%1,%1\n\t" \
		 "bcc 2b\n\t" \
		 "subcc %%g1,1,%%g1\n\t" \
		 "! Got carry from n. Subtract next step to cancel this carry.\n\t" \
		 "bne 4b\n\t" \
		 "addcc %0,%0,%0 ! shift n1n0 and a 0-bit in lsb\n\t" \
		 "sub %1,%2,%1\n\t" \
		 "3: xnor %0,0,%0\n\t" \
		 "! End of inline udiv_qrnnd\n" \
		 : "=&r" (q), \
		   "=&r" (r) \
		 : "r" ((USItype)(d)), \
		   "1" ((USItype)(n1)), \
		   "0" ((USItype)(n0)) : "%g1", "cc")
#define UDIV_NEEDS_NORMALIZATION 0

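/*
 * Illustrative sketch (not part of the original header): udiv_qrnnd()
 * divides the two-word value n1:n0 by d, leaving the quotient in q and
 * the remainder in r.  In longlong.h-style code the caller is expected
 * to ensure n1 < d so the quotient fits in one word, e.g. something like
 * (hi, lo and d are hypothetical caller variables):
 *
 *	USItype q, r;
 *	udiv_qrnnd(q, r, hi, lo, d);	// hi:lo / d, assuming hi < d
 *
 * UDIV_NEEDS_NORMALIZATION being 0 appears to tell the code using this
 * header that d does not have to be pre-shifted so its top bit is set.
 */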
#define abort() \
	return 0

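/*
 * Note (not part of the original header): mapping abort() above to a
 * plain "return 0" lets error paths in the shared math-emu code bail out
 * of the emulation instead of aborting, and __BYTE_ORDER below is
 * derived from the kernel's endianness macros for that same shared code.
 * Both points are my reading of the definitions, not documented here.
 */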
#ifdef __BIG_ENDIAN
#define __BYTE_ORDER __BIG_ENDIAN
#else
#define __BYTE_ORDER __LITTLE_ENDIAN
#endif