; RUN: llc -mtriple=arm64-eabi < %s | FileCheck %s
; rdar://10232252

; External i64 placed in the Objective-C ivar data section; used by @t5
; to test global-base + register addressing.
@object = external hidden global i64, section "__DATA, __objc_ivar", align 8

; base + offset (imm9)
; CHECK: @t1
; CHECK: ldr xzr, [x0, #8]
; CHECK: ret
define void @t1(i64* %object) {
  %incdec.ptr = getelementptr inbounds i64, i64* %object, i64 1
  %tmp = load volatile i64, i64* %incdec.ptr, align 8
  ret void
}

; base + offset (> imm9)
; CHECK: @t2
; CHECK: sub [[ADDREG:x[0-9]+]], x0, #264
; CHECK: ldr xzr, [
; CHECK: ret
define void @t2(i64* %object) {
  %incdec.ptr = getelementptr inbounds i64, i64* %object, i64 -33
  %tmp = load volatile i64, i64* %incdec.ptr, align 8
  ret void
}

; base + unsigned offset (> imm9 and <= imm12 * size of type in bytes)
; CHECK: @t3
; CHECK: ldr xzr, [x0, #32760]
; CHECK: ret
define void @t3(i64* %object) {
  %incdec.ptr = getelementptr inbounds i64, i64* %object, i64 4095
  %tmp = load volatile i64, i64* %incdec.ptr, align 8
  ret void
}

; base + unsigned offset (> imm12 * size of type in bytes)
; CHECK: @t4
; CHECK: orr w[[NUM:[0-9]+]], wzr, #0x8000
; CHECK: ldr xzr, [x0, x[[NUM]]]
; CHECK: ret
define void @t4(i64* %object) {
  %incdec.ptr = getelementptr inbounds i64, i64* %object, i64 4096
  %tmp = load volatile i64, i64* %incdec.ptr, align 8
  ret void
}

; base + reg
; CHECK: @t5
; CHECK: ldr xzr, [x{{[0-9]+}}, x{{[0-9]+}}, lsl #3]
; CHECK: ret
define void @t5(i64 %a) {
  %incdec.ptr = getelementptr inbounds i64, i64* @object, i64 %a
  %tmp = load volatile i64, i64* %incdec.ptr, align 8
  ret void
}

; base + reg + imm
; CHECK: @t6
; CHECK: add [[ADDREG:x[0-9]+]], x1, x0, lsl #3
; CHECK-NEXT: orr w[[NUM:[0-9]+]], wzr, #0x8000
; CHECK: ldr xzr, [x{{[0-9]+}}, x[[NUM]]]
; CHECK: ret
define void @t6(i64 %a, i64* %object) {
  %tmp1 = getelementptr inbounds i64, i64* %object, i64 %a
  %incdec.ptr = getelementptr inbounds i64, i64* %tmp1, i64 4096
  %tmp = load volatile i64, i64* %incdec.ptr, align 8
  ret void
}

71; Test base + wide immediate
72define void @t7(i64 %a) {
73; CHECK-LABEL: t7:
74; CHECK: orr w[[NUM:[0-9]+]], wzr, #0xffff
75; CHECK-NEXT: ldr xzr, [x0, x[[NUM]]]
76 %1 = add i64 %a, 65535 ;0xffff
77 %2 = inttoptr i64 %1 to i64*
David Blaikiea79ac142015-02-27 21:17:42 +000078 %3 = load volatile i64, i64* %2, align 8
Hao Liu3cb826c2014-10-14 06:50:36 +000079 ret void
80}
81
; Negative offset that fits a single mov: materialized and used as a
; register offset.
define void @t8(i64 %a) {
; CHECK-LABEL: t8:
; CHECK: mov [[REG:x[0-9]+]], #-4662
; CHECK-NEXT: ldr xzr, [x0, [[REG]]]
  %1 = sub i64 %a, 4662 ;-4662 is 0xffffffffffffedca
  %2 = inttoptr i64 %1 to i64*
  %3 = load volatile i64, i64* %2, align 8
  ret void
}

; Wide negative immediate added on the left-hand side of the add.
define void @t9(i64 %a) {
; CHECK-LABEL: t9:
; CHECK: mov [[REG:x[0-9]+]], #-305463297
; CHECK-NEXT: ldr xzr, [x0, [[REG]]]
  %1 = add i64 -305463297, %a ;-305463297 is 0xffffffffedcaffff
  %2 = inttoptr i64 %1 to i64*
  %3 = load volatile i64, i64* %2, align 8
  ret void
}

; Large positive immediate materialized with a single mov.
define void @t10(i64 %a) {
; CHECK-LABEL: t10:
; CHECK: mov [[REG:x[0-9]+]], #81909218222800896
; CHECK-NEXT: ldr xzr, [x0, [[REG]]]
  %1 = add i64 %a, 81909218222800896 ;0x123000000000000
  %2 = inttoptr i64 %1 to i64*
  %3 = load volatile i64, i64* %2, align 8
  ret void
}

; Immediate needing a mov/movk pair, then used as a register offset.
define void @t11(i64 %a) {
; CHECK-LABEL: t11:
; CHECK: mov w[[NUM:[0-9]+]], #17767
; CHECK: movk w[[NUM:[0-9]+]], #291
; CHECK-NEXT: ldr xzr, [x0, x[[NUM]]]
  %1 = add i64 %a, 19088743 ;0x1234567
  %2 = inttoptr i64 %1 to i64*
  %3 = load volatile i64, i64* %2, align 8
  ret void
}

; Test some boundaries that should not use movz/movn/orr
define void @t12(i64 %a) {
; CHECK-LABEL: t12:
; CHECK: add [[REG:x[0-9]+]], x0, #4095
; CHECK-NEXT: ldr xzr, {{\[}}[[REG]]]
  %1 = add i64 %a, 4095 ;0xfff
  %2 = inttoptr i64 %1 to i64*
  %3 = load volatile i64, i64* %2, align 8
  ret void
}

; Negative imm12 offset folds into a sub of the base.
define void @t13(i64 %a) {
; CHECK-LABEL: t13:
; CHECK: sub [[REG:x[0-9]+]], x0, #4095
; CHECK-NEXT: ldr xzr, {{\[}}[[REG]]]
  %1 = add i64 %a, -4095 ;-0xfff
  %2 = inttoptr i64 %1 to i64*
  %3 = load volatile i64, i64* %2, align 8
  ret void
}

; Shifted imm12 (add with lsl #12) covers 0x123000.
define void @t14(i64 %a) {
; CHECK-LABEL: t14:
; CHECK: add [[REG:x[0-9]+]], x0, #291, lsl #12
; CHECK-NEXT: ldr xzr, {{\[}}[[REG]]]
  %1 = add i64 %a, 1191936 ;0x123000
  %2 = inttoptr i64 %1 to i64*
  %3 = load volatile i64, i64* %2, align 8
  ret void
}

; Negative shifted imm12 folds into a sub with lsl #12.
define void @t15(i64 %a) {
; CHECK-LABEL: t15:
; CHECK: sub [[REG:x[0-9]+]], x0, #291, lsl #12
; CHECK-NEXT: ldr xzr, {{\[}}[[REG]]]
  %1 = add i64 %a, -1191936 ;0xFFFFFFFFFFEDD000
  %2 = inttoptr i64 %1 to i64*
  %3 = load volatile i64, i64* %2, align 8
  ret void
}

; In-range scaled unsigned offset folds directly into the ldr.
define void @t16(i64 %a) {
; CHECK-LABEL: t16:
; CHECK: ldr xzr, [x0, #28672]
  %1 = add i64 %a, 28672 ;0x7000
  %2 = inttoptr i64 %1 to i64*
  %3 = load volatile i64, i64* %2, align 8
  ret void
}

; Small negative offset fits the unscaled (ldur) imm9 form.
define void @t17(i64 %a) {
; CHECK-LABEL: t17:
; CHECK: ldur xzr, [x0, #-256]
  %1 = add i64 %a, -256 ;-0x100
  %2 = inttoptr i64 %1 to i64*
  %3 = load volatile i64, i64* %2, align 8
  ret void
}