; Test 32-bit signed comparison in which the second operand is a variable.
;
; RUN: llc < %s -mtriple=s390x-linux-gnu | FileCheck %s

; External helper used by @f12 to force CC to be re-evaluated across a call.
declare i32 @foo()

; Check register comparison.
define double @f1(double %a, double %b, i32 %i1, i32 %i2) {
; CHECK-LABEL: f1:
; CHECK: crjl %r2, %r3
; CHECK: ldr %f0, %f2
; CHECK: br %r14
  %cond = icmp slt i32 %i1, %i2
  %res = select i1 %cond, double %a, double %b
  ret double %res
}

; Check the low end of the C range.
define double @f2(double %a, double %b, i32 %i1, i32 *%ptr) {
; CHECK-LABEL: f2:
; CHECK: c %r2, 0(%r3)
; CHECK-NEXT: jl
; CHECK: ldr %f0, %f2
; CHECK: br %r14
  %i2 = load i32 *%ptr
  %cond = icmp slt i32 %i1, %i2
  %res = select i1 %cond, double %a, double %b
  ret double %res
}

; Check the high end of the aligned C range (4092 = 1023 * 4 bytes).
define double @f3(double %a, double %b, i32 %i1, i32 *%base) {
; CHECK-LABEL: f3:
; CHECK: c %r2, 4092(%r3)
; CHECK-NEXT: jl
; CHECK: ldr %f0, %f2
; CHECK: br %r14
  %ptr = getelementptr i32 *%base, i64 1023
  %i2 = load i32 *%ptr
  %cond = icmp slt i32 %i1, %i2
  %res = select i1 %cond, double %a, double %b
  ret double %res
}

; Check the next word up, which should use CY instead of C.
define double @f4(double %a, double %b, i32 %i1, i32 *%base) {
; CHECK-LABEL: f4:
; CHECK: cy %r2, 4096(%r3)
; CHECK-NEXT: jl
; CHECK: ldr %f0, %f2
; CHECK: br %r14
  %ptr = getelementptr i32 *%base, i64 1024
  %i2 = load i32 *%ptr
  %cond = icmp slt i32 %i1, %i2
  %res = select i1 %cond, double %a, double %b
  ret double %res
}

; Check the high end of the aligned CY range (524284 = 131071 * 4 bytes).
define double @f5(double %a, double %b, i32 %i1, i32 *%base) {
; CHECK-LABEL: f5:
; CHECK: cy %r2, 524284(%r3)
; CHECK-NEXT: jl
; CHECK: ldr %f0, %f2
; CHECK: br %r14
  %ptr = getelementptr i32 *%base, i64 131071
  %i2 = load i32 *%ptr
  %cond = icmp slt i32 %i1, %i2
  %res = select i1 %cond, double %a, double %b
  ret double %res
}

; Check the next word up, which needs separate address logic.
; Other sequences besides this one would be OK.
define double @f6(double %a, double %b, i32 %i1, i32 *%base) {
; CHECK-LABEL: f6:
; CHECK: agfi %r3, 524288
; CHECK: c %r2, 0(%r3)
; CHECK-NEXT: jl
; CHECK: ldr %f0, %f2
; CHECK: br %r14
  %ptr = getelementptr i32 *%base, i64 131072
  %i2 = load i32 *%ptr
  %cond = icmp slt i32 %i1, %i2
  %res = select i1 %cond, double %a, double %b
  ret double %res
}

; Check the high end of the negative aligned CY range.
define double @f7(double %a, double %b, i32 %i1, i32 *%base) {
; CHECK-LABEL: f7:
; CHECK: cy %r2, -4(%r3)
; CHECK-NEXT: jl
; CHECK: ldr %f0, %f2
; CHECK: br %r14
  %ptr = getelementptr i32 *%base, i64 -1
  %i2 = load i32 *%ptr
  %cond = icmp slt i32 %i1, %i2
  %res = select i1 %cond, double %a, double %b
  ret double %res
}

; Check the low end of the CY range.
define double @f8(double %a, double %b, i32 %i1, i32 *%base) {
; CHECK-LABEL: f8:
; CHECK: cy %r2, -524288(%r3)
; CHECK-NEXT: jl
; CHECK: ldr %f0, %f2
; CHECK: br %r14
  %ptr = getelementptr i32 *%base, i64 -131072
  %i2 = load i32 *%ptr
  %cond = icmp slt i32 %i1, %i2
  %res = select i1 %cond, double %a, double %b
  ret double %res
}

; Check the next word down, which needs separate address logic.
; Other sequences besides this one would be OK.
define double @f9(double %a, double %b, i32 %i1, i32 *%base) {
; CHECK-LABEL: f9:
; CHECK: agfi %r3, -524292
; CHECK: c %r2, 0(%r3)
; CHECK-NEXT: jl
; CHECK: ldr %f0, %f2
; CHECK: br %r14
  %ptr = getelementptr i32 *%base, i64 -131073
  %i2 = load i32 *%ptr
  %cond = icmp slt i32 %i1, %i2
  %res = select i1 %cond, double %a, double %b
  ret double %res
}

; Check that C allows an index.
define double @f10(double %a, double %b, i32 %i1, i64 %base, i64 %index) {
; CHECK-LABEL: f10:
; CHECK: c %r2, 4092({{%r4,%r3|%r3,%r4}})
; CHECK-NEXT: jl
; CHECK: ldr %f0, %f2
; CHECK: br %r14
  %add1 = add i64 %base, %index
  %add2 = add i64 %add1, 4092
  %ptr = inttoptr i64 %add2 to i32 *
  %i2 = load i32 *%ptr
  %cond = icmp slt i32 %i1, %i2
  %res = select i1 %cond, double %a, double %b
  ret double %res
}

; Check that CY allows an index.
define double @f11(double %a, double %b, i32 %i1, i64 %base, i64 %index) {
; CHECK-LABEL: f11:
; CHECK: cy %r2, 4096({{%r4,%r3|%r3,%r4}})
; CHECK-NEXT: jl
; CHECK: ldr %f0, %f2
; CHECK: br %r14
  %add1 = add i64 %base, %index
  %add2 = add i64 %add1, 4096
  %ptr = inttoptr i64 %add2 to i32 *
  %i2 = load i32 *%ptr
  %cond = icmp slt i32 %i1, %i2
  %res = select i1 %cond, double %a, double %b
  ret double %res
}

; The first branch here got recreated by InsertBranch while splitting the
; critical edge %entry->%while.body, which lost the kills information for CC.
define void @f12(i32 %a, i32 %b) {
; CHECK-LABEL: f12:
; CHECK: cije %r2, 0
; CHECK: crjlh %r2,
; CHECK: br %r14
entry:
  %cmp11 = icmp eq i32 %a, 0
  br i1 %cmp11, label %while.end, label %while.body

while.body:
  %c = call i32 @foo()
  %cmp12 = icmp eq i32 %c, %b
  br i1 %cmp12, label %while.end, label %while.body

while.end:
  ret void
}

; Check the comparison can be reversed if that allows C to be used.
define double @f13(double %a, double %b, i32 %i2, i32 *%ptr) {
; CHECK-LABEL: f13:
; CHECK: c %r2, 0(%r3)
; CHECK-NEXT: jh {{\.L.*}}
; CHECK: ldr %f0, %f2
; CHECK: br %r14
  %i1 = load i32 *%ptr
  %cond = icmp slt i32 %i1, %i2
  %res = select i1 %cond, double %a, double %b
  ret double %res
}