; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -mattr=+d -verify-machineinstrs < %s \
; RUN:   | FileCheck -check-prefix=RV32IFD %s
; RUN: llc -mtriple=riscv64 -mattr=+d -verify-machineinstrs < %s \
; RUN:   | FileCheck -check-prefix=RV64IFD %s

define i32 @fcmp_false(double %a, double %b) nounwind {
; RV32IFD-LABEL: fcmp_false:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    mv a0, zero
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: fcmp_false:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    mv a0, zero
; RV64IFD-NEXT:    ret
  %1 = fcmp false double %a, %b
  %2 = zext i1 %1 to i32
  ret i32 %2
}

define i32 @fcmp_oeq(double %a, double %b) nounwind {
; RV32IFD-LABEL: fcmp_oeq:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -16
; RV32IFD-NEXT:    sw a2, 8(sp)
; RV32IFD-NEXT:    sw a3, 12(sp)
; RV32IFD-NEXT:    fld ft0, 8(sp)
; RV32IFD-NEXT:    sw a0, 8(sp)
; RV32IFD-NEXT:    sw a1, 12(sp)
; RV32IFD-NEXT:    fld ft1, 8(sp)
; RV32IFD-NEXT:    feq.d a0, ft1, ft0
; RV32IFD-NEXT:    addi sp, sp, 16
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: fcmp_oeq:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    fmv.d.x ft0, a1
; RV64IFD-NEXT:    fmv.d.x ft1, a0
; RV64IFD-NEXT:    feq.d a0, ft1, ft0
; RV64IFD-NEXT:    ret
  %1 = fcmp oeq double %a, %b
  %2 = zext i1 %1 to i32
  ret i32 %2
}

define i32 @fcmp_ogt(double %a, double %b) nounwind {
; RV32IFD-LABEL: fcmp_ogt:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -16
; RV32IFD-NEXT:    sw a0, 8(sp)
; RV32IFD-NEXT:    sw a1, 12(sp)
; RV32IFD-NEXT:    fld ft0, 8(sp)
; RV32IFD-NEXT:    sw a2, 8(sp)
; RV32IFD-NEXT:    sw a3, 12(sp)
; RV32IFD-NEXT:    fld ft1, 8(sp)
; RV32IFD-NEXT:    flt.d a0, ft1, ft0
; RV32IFD-NEXT:    addi sp, sp, 16
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: fcmp_ogt:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    fmv.d.x ft0, a0
; RV64IFD-NEXT:    fmv.d.x ft1, a1
; RV64IFD-NEXT:    flt.d a0, ft1, ft0
; RV64IFD-NEXT:    ret
  %1 = fcmp ogt double %a, %b
  %2 = zext i1 %1 to i32
  ret i32 %2
}

define i32 @fcmp_oge(double %a, double %b) nounwind {
; RV32IFD-LABEL: fcmp_oge:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -16
; RV32IFD-NEXT:    sw a0, 8(sp)
; RV32IFD-NEXT:    sw a1, 12(sp)
; RV32IFD-NEXT:    fld ft0, 8(sp)
; RV32IFD-NEXT:    sw a2, 8(sp)
; RV32IFD-NEXT:    sw a3, 12(sp)
; RV32IFD-NEXT:    fld ft1, 8(sp)
; RV32IFD-NEXT:    fle.d a0, ft1, ft0
; RV32IFD-NEXT:    addi sp, sp, 16
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: fcmp_oge:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    fmv.d.x ft0, a0
; RV64IFD-NEXT:    fmv.d.x ft1, a1
; RV64IFD-NEXT:    fle.d a0, ft1, ft0
; RV64IFD-NEXT:    ret
  %1 = fcmp oge double %a, %b
  %2 = zext i1 %1 to i32
  ret i32 %2
}

define i32 @fcmp_olt(double %a, double %b) nounwind {
; RV32IFD-LABEL: fcmp_olt:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -16
; RV32IFD-NEXT:    sw a2, 8(sp)
; RV32IFD-NEXT:    sw a3, 12(sp)
; RV32IFD-NEXT:    fld ft0, 8(sp)
; RV32IFD-NEXT:    sw a0, 8(sp)
; RV32IFD-NEXT:    sw a1, 12(sp)
; RV32IFD-NEXT:    fld ft1, 8(sp)
; RV32IFD-NEXT:    flt.d a0, ft1, ft0
; RV32IFD-NEXT:    addi sp, sp, 16
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: fcmp_olt:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    fmv.d.x ft0, a1
; RV64IFD-NEXT:    fmv.d.x ft1, a0
; RV64IFD-NEXT:    flt.d a0, ft1, ft0
; RV64IFD-NEXT:    ret
  %1 = fcmp olt double %a, %b
  %2 = zext i1 %1 to i32
  ret i32 %2
}

define i32 @fcmp_ole(double %a, double %b) nounwind {
; RV32IFD-LABEL: fcmp_ole:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -16
; RV32IFD-NEXT:    sw a2, 8(sp)
; RV32IFD-NEXT:    sw a3, 12(sp)
; RV32IFD-NEXT:    fld ft0, 8(sp)
; RV32IFD-NEXT:    sw a0, 8(sp)
; RV32IFD-NEXT:    sw a1, 12(sp)
; RV32IFD-NEXT:    fld ft1, 8(sp)
; RV32IFD-NEXT:    fle.d a0, ft1, ft0
; RV32IFD-NEXT:    addi sp, sp, 16
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: fcmp_ole:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    fmv.d.x ft0, a1
; RV64IFD-NEXT:    fmv.d.x ft1, a0
; RV64IFD-NEXT:    fle.d a0, ft1, ft0
; RV64IFD-NEXT:    ret
  %1 = fcmp ole double %a, %b
  %2 = zext i1 %1 to i32
  ret i32 %2
}

define i32 @fcmp_one(double %a, double %b) nounwind {
; RV32IFD-LABEL: fcmp_one:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -16
; RV32IFD-NEXT:    sw a0, 8(sp)
; RV32IFD-NEXT:    sw a1, 12(sp)
; RV32IFD-NEXT:    fld ft0, 8(sp)
; RV32IFD-NEXT:    sw a2, 8(sp)
; RV32IFD-NEXT:    sw a3, 12(sp)
; RV32IFD-NEXT:    fld ft1, 8(sp)
; RV32IFD-NEXT:    feq.d a0, ft1, ft1
; RV32IFD-NEXT:    feq.d a1, ft0, ft0
; RV32IFD-NEXT:    and a0, a1, a0
; RV32IFD-NEXT:    feq.d a1, ft0, ft1
; RV32IFD-NEXT:    not a1, a1
; RV32IFD-NEXT:    seqz a0, a0
; RV32IFD-NEXT:    xori a0, a0, 1
; RV32IFD-NEXT:    and a0, a1, a0
; RV32IFD-NEXT:    addi sp, sp, 16
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: fcmp_one:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    fmv.d.x ft0, a0
; RV64IFD-NEXT:    fmv.d.x ft1, a1
; RV64IFD-NEXT:    feq.d a0, ft1, ft1
; RV64IFD-NEXT:    feq.d a1, ft0, ft0
; RV64IFD-NEXT:    and a0, a1, a0
; RV64IFD-NEXT:    feq.d a1, ft0, ft1
; RV64IFD-NEXT:    not a1, a1
; RV64IFD-NEXT:    seqz a0, a0
; RV64IFD-NEXT:    xori a0, a0, 1
; RV64IFD-NEXT:    and a0, a1, a0
; RV64IFD-NEXT:    ret
  %1 = fcmp one double %a, %b
  %2 = zext i1 %1 to i32
  ret i32 %2
}

define i32 @fcmp_ord(double %a, double %b) nounwind {
; RV32IFD-LABEL: fcmp_ord:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -16
; RV32IFD-NEXT:    sw a0, 8(sp)
; RV32IFD-NEXT:    sw a1, 12(sp)
; RV32IFD-NEXT:    fld ft0, 8(sp)
; RV32IFD-NEXT:    sw a2, 8(sp)
; RV32IFD-NEXT:    sw a3, 12(sp)
; RV32IFD-NEXT:    fld ft1, 8(sp)
; RV32IFD-NEXT:    feq.d a0, ft1, ft1
; RV32IFD-NEXT:    feq.d a1, ft0, ft0
; RV32IFD-NEXT:    and a0, a1, a0
; RV32IFD-NEXT:    seqz a0, a0
; RV32IFD-NEXT:    xori a0, a0, 1
; RV32IFD-NEXT:    addi sp, sp, 16
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: fcmp_ord:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    fmv.d.x ft0, a1
; RV64IFD-NEXT:    feq.d a1, ft0, ft0
; RV64IFD-NEXT:    fmv.d.x ft0, a0
; RV64IFD-NEXT:    feq.d a0, ft0, ft0
; RV64IFD-NEXT:    and a0, a0, a1
; RV64IFD-NEXT:    seqz a0, a0
; RV64IFD-NEXT:    xori a0, a0, 1
; RV64IFD-NEXT:    ret
  %1 = fcmp ord double %a, %b
  %2 = zext i1 %1 to i32
  ret i32 %2
}

define i32 @fcmp_ueq(double %a, double %b) nounwind {
; RV32IFD-LABEL: fcmp_ueq:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -16
; RV32IFD-NEXT:    sw a2, 8(sp)
; RV32IFD-NEXT:    sw a3, 12(sp)
; RV32IFD-NEXT:    fld ft0, 8(sp)
; RV32IFD-NEXT:    sw a0, 8(sp)
; RV32IFD-NEXT:    sw a1, 12(sp)
; RV32IFD-NEXT:    fld ft1, 8(sp)
; RV32IFD-NEXT:    feq.d a0, ft1, ft0
; RV32IFD-NEXT:    feq.d a1, ft0, ft0
; RV32IFD-NEXT:    feq.d a2, ft1, ft1
; RV32IFD-NEXT:    and a1, a2, a1
; RV32IFD-NEXT:    seqz a1, a1
; RV32IFD-NEXT:    or a0, a0, a1
; RV32IFD-NEXT:    addi sp, sp, 16
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: fcmp_ueq:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    fmv.d.x ft0, a1
; RV64IFD-NEXT:    fmv.d.x ft1, a0
; RV64IFD-NEXT:    feq.d a0, ft1, ft0
; RV64IFD-NEXT:    feq.d a1, ft0, ft0
; RV64IFD-NEXT:    feq.d a2, ft1, ft1
; RV64IFD-NEXT:    and a1, a2, a1
; RV64IFD-NEXT:    seqz a1, a1
; RV64IFD-NEXT:    or a0, a0, a1
; RV64IFD-NEXT:    ret
  %1 = fcmp ueq double %a, %b
  %2 = zext i1 %1 to i32
  ret i32 %2
}

define i32 @fcmp_ugt(double %a, double %b) nounwind {
; RV32IFD-LABEL: fcmp_ugt:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -16
; RV32IFD-NEXT:    sw a2, 8(sp)
; RV32IFD-NEXT:    sw a3, 12(sp)
; RV32IFD-NEXT:    fld ft0, 8(sp)
; RV32IFD-NEXT:    sw a0, 8(sp)
; RV32IFD-NEXT:    sw a1, 12(sp)
; RV32IFD-NEXT:    fld ft1, 8(sp)
; RV32IFD-NEXT:    fle.d a0, ft1, ft0
; RV32IFD-NEXT:    xori a0, a0, 1
; RV32IFD-NEXT:    addi sp, sp, 16
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: fcmp_ugt:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    fmv.d.x ft0, a1
; RV64IFD-NEXT:    fmv.d.x ft1, a0
; RV64IFD-NEXT:    fle.d a0, ft1, ft0
; RV64IFD-NEXT:    xori a0, a0, 1
; RV64IFD-NEXT:    ret
  %1 = fcmp ugt double %a, %b
  %2 = zext i1 %1 to i32
  ret i32 %2
}

define i32 @fcmp_uge(double %a, double %b) nounwind {
; RV32IFD-LABEL: fcmp_uge:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -16
; RV32IFD-NEXT:    sw a2, 8(sp)
; RV32IFD-NEXT:    sw a3, 12(sp)
; RV32IFD-NEXT:    fld ft0, 8(sp)
; RV32IFD-NEXT:    sw a0, 8(sp)
; RV32IFD-NEXT:    sw a1, 12(sp)
; RV32IFD-NEXT:    fld ft1, 8(sp)
; RV32IFD-NEXT:    flt.d a0, ft1, ft0
; RV32IFD-NEXT:    xori a0, a0, 1
; RV32IFD-NEXT:    addi sp, sp, 16
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: fcmp_uge:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    fmv.d.x ft0, a1
; RV64IFD-NEXT:    fmv.d.x ft1, a0
; RV64IFD-NEXT:    flt.d a0, ft1, ft0
; RV64IFD-NEXT:    xori a0, a0, 1
; RV64IFD-NEXT:    ret
  %1 = fcmp uge double %a, %b
  %2 = zext i1 %1 to i32
  ret i32 %2
}

define i32 @fcmp_ult(double %a, double %b) nounwind {
; RV32IFD-LABEL: fcmp_ult:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -16
; RV32IFD-NEXT:    sw a0, 8(sp)
; RV32IFD-NEXT:    sw a1, 12(sp)
; RV32IFD-NEXT:    fld ft0, 8(sp)
; RV32IFD-NEXT:    sw a2, 8(sp)
; RV32IFD-NEXT:    sw a3, 12(sp)
; RV32IFD-NEXT:    fld ft1, 8(sp)
; RV32IFD-NEXT:    fle.d a0, ft1, ft0
; RV32IFD-NEXT:    xori a0, a0, 1
; RV32IFD-NEXT:    addi sp, sp, 16
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: fcmp_ult:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    fmv.d.x ft0, a0
; RV64IFD-NEXT:    fmv.d.x ft1, a1
; RV64IFD-NEXT:    fle.d a0, ft1, ft0
; RV64IFD-NEXT:    xori a0, a0, 1
; RV64IFD-NEXT:    ret
  %1 = fcmp ult double %a, %b
  %2 = zext i1 %1 to i32
  ret i32 %2
}

define i32 @fcmp_ule(double %a, double %b) nounwind {
; RV32IFD-LABEL: fcmp_ule:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -16
; RV32IFD-NEXT:    sw a0, 8(sp)
; RV32IFD-NEXT:    sw a1, 12(sp)
; RV32IFD-NEXT:    fld ft0, 8(sp)
; RV32IFD-NEXT:    sw a2, 8(sp)
; RV32IFD-NEXT:    sw a3, 12(sp)
; RV32IFD-NEXT:    fld ft1, 8(sp)
; RV32IFD-NEXT:    flt.d a0, ft1, ft0
; RV32IFD-NEXT:    xori a0, a0, 1
; RV32IFD-NEXT:    addi sp, sp, 16
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: fcmp_ule:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    fmv.d.x ft0, a0
; RV64IFD-NEXT:    fmv.d.x ft1, a1
; RV64IFD-NEXT:    flt.d a0, ft1, ft0
; RV64IFD-NEXT:    xori a0, a0, 1
; RV64IFD-NEXT:    ret
  %1 = fcmp ule double %a, %b
  %2 = zext i1 %1 to i32
  ret i32 %2
}

define i32 @fcmp_une(double %a, double %b) nounwind {
; RV32IFD-LABEL: fcmp_une:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -16
; RV32IFD-NEXT:    sw a2, 8(sp)
; RV32IFD-NEXT:    sw a3, 12(sp)
; RV32IFD-NEXT:    fld ft0, 8(sp)
; RV32IFD-NEXT:    sw a0, 8(sp)
; RV32IFD-NEXT:    sw a1, 12(sp)
; RV32IFD-NEXT:    fld ft1, 8(sp)
; RV32IFD-NEXT:    feq.d a0, ft1, ft0
; RV32IFD-NEXT:    xori a0, a0, 1
; RV32IFD-NEXT:    addi sp, sp, 16
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: fcmp_une:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    fmv.d.x ft0, a1
; RV64IFD-NEXT:    fmv.d.x ft1, a0
; RV64IFD-NEXT:    feq.d a0, ft1, ft0
; RV64IFD-NEXT:    xori a0, a0, 1
; RV64IFD-NEXT:    ret
  %1 = fcmp une double %a, %b
  %2 = zext i1 %1 to i32
  ret i32 %2
}

define i32 @fcmp_uno(double %a, double %b) nounwind {
; RV32IFD-LABEL: fcmp_uno:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -16
; RV32IFD-NEXT:    sw a0, 8(sp)
; RV32IFD-NEXT:    sw a1, 12(sp)
; RV32IFD-NEXT:    fld ft0, 8(sp)
; RV32IFD-NEXT:    sw a2, 8(sp)
; RV32IFD-NEXT:    sw a3, 12(sp)
; RV32IFD-NEXT:    fld ft1, 8(sp)
; RV32IFD-NEXT:    feq.d a0, ft1, ft1
; RV32IFD-NEXT:    feq.d a1, ft0, ft0
; RV32IFD-NEXT:    and a0, a1, a0
; RV32IFD-NEXT:    seqz a0, a0
; RV32IFD-NEXT:    addi sp, sp, 16
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: fcmp_uno:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    fmv.d.x ft0, a1
; RV64IFD-NEXT:    feq.d a1, ft0, ft0
; RV64IFD-NEXT:    fmv.d.x ft0, a0
; RV64IFD-NEXT:    feq.d a0, ft0, ft0
; RV64IFD-NEXT:    and a0, a0, a1
; RV64IFD-NEXT:    seqz a0, a0
; RV64IFD-NEXT:    ret
  %1 = fcmp uno double %a, %b
  %2 = zext i1 %1 to i32
  ret i32 %2
}

define i32 @fcmp_true(double %a, double %b) nounwind {
; RV32IFD-LABEL: fcmp_true:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi a0, zero, 1
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: fcmp_true:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    addi a0, zero, 1
; RV64IFD-NEXT:    ret
  %1 = fcmp true double %a, %b
  %2 = zext i1 %1 to i32
  ret i32 %2
}