; Test 64-bit unsigned comparison in which the second operand is a variable.
;
; RUN: llc < %s -mtriple=s390x-linux-gnu | FileCheck %s

; Check CLGR (register-register compare logical, fused compare-and-branch).
define double @f1(double %a, double %b, i64 %i1, i64 %i2) {
; CHECK-LABEL: f1:
; CHECK: clgrjl %r2, %r3
; CHECK: ldr %f0, %f2
; CHECK: br %r14
  %cond = icmp ult i64 %i1, %i2
  %res = select i1 %cond, double %a, double %b
  ret double %res
}

; Check CLG (register-memory compare logical) with no displacement.
define double @f2(double %a, double %b, i64 %i1, i64 *%ptr) {
; CHECK-LABEL: f2:
; CHECK: clg %r2, 0(%r3)
; CHECK-NEXT: jl
; CHECK: ldr %f0, %f2
; CHECK: br %r14
  %i2 = load i64 *%ptr
  %cond = icmp ult i64 %i1, %i2
  %res = select i1 %cond, double %a, double %b
  ret double %res
}

; Check the high end of the aligned CLG range (20-bit signed displacement,
; 65535 * 8 = 524280).
define double @f3(double %a, double %b, i64 %i1, i64 *%base) {
; CHECK-LABEL: f3:
; CHECK: clg %r2, 524280(%r3)
; CHECK-NEXT: jl
; CHECK: ldr %f0, %f2
; CHECK: br %r14
  %ptr = getelementptr i64 *%base, i64 65535
  %i2 = load i64 *%ptr
  %cond = icmp ult i64 %i1, %i2
  %res = select i1 %cond, double %a, double %b
  ret double %res
}

; Check the next doubleword up, which needs separate address logic.
; Other sequences besides this one would be OK.
define double @f4(double %a, double %b, i64 %i1, i64 *%base) {
; CHECK-LABEL: f4:
; CHECK: agfi %r3, 524288
; CHECK: clg %r2, 0(%r3)
; CHECK-NEXT: jl
; CHECK: ldr %f0, %f2
; CHECK: br %r14
  %ptr = getelementptr i64 *%base, i64 65536
  %i2 = load i64 *%ptr
  %cond = icmp ult i64 %i1, %i2
  %res = select i1 %cond, double %a, double %b
  ret double %res
}

; Check the high end of the negative aligned CLG range.
define double @f5(double %a, double %b, i64 %i1, i64 *%base) {
; CHECK-LABEL: f5:
; CHECK: clg %r2, -8(%r3)
; CHECK-NEXT: jl
; CHECK: ldr %f0, %f2
; CHECK: br %r14
  %ptr = getelementptr i64 *%base, i64 -1
  %i2 = load i64 *%ptr
  %cond = icmp ult i64 %i1, %i2
  %res = select i1 %cond, double %a, double %b
  ret double %res
}

; Check the low end of the CLG range (-65536 * 8 = -524288, the most
; negative 20-bit displacement).
define double @f6(double %a, double %b, i64 %i1, i64 *%base) {
; CHECK-LABEL: f6:
; CHECK: clg %r2, -524288(%r3)
; CHECK-NEXT: jl
; CHECK: ldr %f0, %f2
; CHECK: br %r14
  %ptr = getelementptr i64 *%base, i64 -65536
  %i2 = load i64 *%ptr
  %cond = icmp ult i64 %i1, %i2
  %res = select i1 %cond, double %a, double %b
  ret double %res
}

; Check the next doubleword down, which needs separate address logic.
; Other sequences besides this one would be OK.
define double @f7(double %a, double %b, i64 %i1, i64 *%base) {
; CHECK-LABEL: f7:
; CHECK: agfi %r3, -524296
; CHECK: clg %r2, 0(%r3)
; CHECK-NEXT: jl
; CHECK: ldr %f0, %f2
; CHECK: br %r14
  %ptr = getelementptr i64 *%base, i64 -65537
  %i2 = load i64 *%ptr
  %cond = icmp ult i64 %i1, %i2
  %res = select i1 %cond, double %a, double %b
  ret double %res
}

; Check that CLG allows an index register in its address.
define double @f8(double %a, double %b, i64 %i1, i64 %base, i64 %index) {
; CHECK-LABEL: f8:
; CHECK: clg %r2, 524280({{%r4,%r3|%r3,%r4}})
; CHECK-NEXT: jl
; CHECK: ldr %f0, %f2
; CHECK: br %r14
  %add1 = add i64 %base, %index
  %add2 = add i64 %add1, 524280
  %ptr = inttoptr i64 %add2 to i64 *
  %i2 = load i64 *%ptr
  %cond = icmp ult i64 %i1, %i2
  %res = select i1 %cond, double %a, double %b
  ret double %res
}

; Check the comparison can be reversed if that allows CLG to be used
; (memory operand is the first icmp operand, so the branch condition flips
; from "low" to "high").
define double @f9(double %a, double %b, i64 %i2, i64 *%ptr) {
; CHECK-LABEL: f9:
; CHECK: clg %r2, 0(%r3)
; CHECK-NEXT: jh {{\.L.*}}
; CHECK: ldr %f0, %f2
; CHECK: br %r14
  %i1 = load i64 *%ptr
  %cond = icmp ult i64 %i1, %i2
  %res = select i1 %cond, double %a, double %b
  ret double %res
}