; RUN: llc < %s -O0 -fast-isel-abort -relocation-model=dynamic-no-pic -mtriple=armv7-apple-ios | FileCheck %s --check-prefix=ARM
; RUN: llc < %s -O0 -fast-isel-abort -relocation-model=dynamic-no-pic -mtriple=thumbv7-apple-ios | FileCheck %s --check-prefix=THUMB
; RUN: llc < %s -O0 -arm-strict-align -relocation-model=dynamic-no-pic -mtriple=armv7-apple-ios | FileCheck %s --check-prefix=ARM-STRICT-ALIGN
; RUN: llc < %s -O0 -arm-strict-align -relocation-model=dynamic-no-pic -mtriple=thumbv7-apple-ios | FileCheck %s --check-prefix=THUMB-STRICT-ALIGN

; Very basic fast-isel functionality.
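; At -O0, llc selects instructions with FastISel; in the first two RUN lines,
; -fast-isel-abort additionally makes the test fail if FastISel has to fall
; back to the regular selector, so each function below is expected to be
; handled by FastISel directly.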
define i32 @add(i32 %a, i32 %b) nounwind {
entry:
  %a.addr = alloca i32, align 4
  %b.addr = alloca i32, align 4
  store i32 %a, i32* %a.addr
  store i32 %b, i32* %b.addr
  %tmp = load i32* %a.addr
  %tmp1 = load i32* %b.addr
  %add = add nsw i32 %tmp, %tmp1
  ret i32 %add
}

; Check truncate to bool
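; Truncating an i32 to i1 keeps only the low bit, so the conditional branch is
; expected to be selected as a test of bit 0 (the tst/tst.w #1 checks below).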
define void @test1(i32 %tmp) nounwind {
entry:
  %tobool = trunc i32 %tmp to i1
  br i1 %tobool, label %if.then, label %if.end

if.then:                                          ; preds = %entry
  call void @test1(i32 0)
  br label %if.end

if.end:                                           ; preds = %if.then, %entry
  ret void
; ARM: test1:
; ARM: tst r0, #1
; THUMB: test1:
; THUMB: tst.w r0, #1
}

; Check some simple operations with immediates
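; 4096 (0x1000) is representable as an ARM/Thumb2 modified immediate, so it is
; added directly; 4095 (0xfff) is not, so Thumb2 uses addw with its 12-bit
; immediate while ARM materializes the constant with movw first.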
define void @test2(i32 %tmp, i32* %ptr) nounwind {
; THUMB: test2:
; ARM: test2:

b1:
  %a = add i32 %tmp, 4096
  store i32 %a, i32* %ptr
  br label %b2

; THUMB: add.w {{.*}} #4096
; ARM: add {{.*}} #4096

b2:
  %b = add i32 %tmp, 4095
  store i32 %b, i32* %ptr
  br label %b3
; THUMB: addw {{.*}} #4095
; ARM: movw {{.*}} #4095
; ARM: add

b3:
  %c = or i32 %tmp, 4
  store i32 %c, i32* %ptr
  ret void

; THUMB: orr {{.*}} #4
; ARM: orr {{.*}} #4
}

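; Check truncates plus zero- and sign-extensions through i8/i16/i32, together
; with the byte/halfword stores and loads that consume them.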
define void @test3(i32 %tmp, i32* %ptr1, i16* %ptr2, i8* %ptr3) nounwind {
; THUMB: test3:
; ARM: test3:

bb1:
  %a1 = trunc i32 %tmp to i16
  %a2 = trunc i16 %a1 to i8
  %a3 = trunc i8 %a2 to i1
  %a4 = zext i1 %a3 to i8
  store i8 %a4, i8* %ptr3
  %a5 = zext i8 %a4 to i16
  store i16 %a5, i16* %ptr2
  %a6 = zext i16 %a5 to i32
  store i32 %a6, i32* %ptr1
  br label %bb2

; THUMB: and
; THUMB: strb
; THUMB: uxtb
; THUMB: strh
; THUMB: uxth
; ARM: and
; ARM: strb
; ARM: uxtb
; ARM: strh
; ARM: uxth

bb2:
  %b1 = trunc i32 %tmp to i16
  %b2 = trunc i16 %b1 to i8
  store i8 %b2, i8* %ptr3
  %b3 = sext i8 %b2 to i16
  store i16 %b3, i16* %ptr2
  %b4 = sext i16 %b3 to i32
  store i32 %b4, i32* %ptr1
  br label %bb3

; THUMB: strb
; THUMB: sxtb
; THUMB: strh
; THUMB: sxth
; ARM: strb
; ARM: sxtb
; ARM: strh
; ARM: sxth

bb3:
  %c1 = load i8* %ptr3
  %c2 = load i16* %ptr2
  %c3 = load i32* %ptr1
  %c4 = zext i8 %c1 to i32
  %c5 = sext i16 %c2 to i32
  %c6 = add i32 %c4, %c5
  %c7 = sub i32 %c3, %c6
  store i32 %c7, i32* %ptr1
  ret void

; THUMB: ldrb
; THUMB: ldrh
; THUMB: uxtb
; THUMB: sxth
; THUMB: add
; THUMB: sub
; ARM: ldrb
; ARM: ldrh
; ARM: uxtb
; ARM: sxth
; ARM: add
; ARM: sub
}

; Check loads/stores with globals
@test4g = external global i32

define void @test4() {
  %a = load i32* @test4g
  %b = add i32 %a, 1
  store i32 %b, i32* @test4g
  ret void

; Note that relocations are either movw/movt or constant pool
; loads. Different platforms will select different approaches.
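; Either way the global's address ends up in r0; the value is then loaded,
; incremented, and stored back through that pointer.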

; THUMB: {{(movw r0, :lower16:L_test4g\$non_lazy_ptr)|(ldr.n r0, .LCPI)}}
; THUMB: {{(movt r0, :upper16:L_test4g\$non_lazy_ptr)?}}
; THUMB: ldr r0, [r0]
; THUMB: ldr r1, [r0]
; THUMB: adds r1, #1
; THUMB: str r1, [r0]

; ARM: {{(movw r0, :lower16:L_test4g\$non_lazy_ptr)|(ldr r0, .LCPI)}}
; ARM: {{(movt r0, :upper16:L_test4g\$non_lazy_ptr)?}}
; ARM: ldr r0, [r0]
; ARM: ldr r1, [r0]
; ARM: add r1, r1, #1
; ARM: str r1, [r0]
}

; Check unaligned stores
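; The packed struct below forces an align-1 float store. The checks expect the
; value to be moved to a core register and stored with a plain str rather than
; a vstr, presumably because VFP stores require word alignment.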
%struct.anon = type <{ float }>

@a = common global %struct.anon* null, align 4

define void @unaligned_store(float %x, float %y) nounwind {
entry:
; ARM: @unaligned_store
; ARM: vmov r1, s0
; ARM: str r1, [r0]

; THUMB: @unaligned_store
; THUMB: vmov r1, s0
; THUMB: str r1, [r0]

  %add = fadd float %x, %y
  %0 = load %struct.anon** @a, align 4
  %x1 = getelementptr inbounds %struct.anon* %0, i32 0, i32 0
  store float %add, float* %x1, align 1
  ret void
}

; Doublewords require only word-alignment.
; rdar://10528060
%struct.anon.0 = type { double }

@foo_unpacked = common global %struct.anon.0 zeroinitializer, align 4

define void @test5(double %a, double %b) nounwind {
entry:
; ARM: @test5
; THUMB: @test5
  %add = fadd double %a, %b
  store double %add, double* getelementptr inbounds (%struct.anon.0* @foo_unpacked, i32 0, i32 0), align 4
; ARM: vstr d16, [r0]
; THUMB: vstr d16, [r0]
  ret void
}

; Check unaligned loads of floats
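; Mirror of the unaligned-store case: the float is expected to be loaded with
; an integer ldr and moved into a VFP register with vmov before the compare.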
%class.TAlignTest = type <{ i16, float }>

define zeroext i1 @test6(%class.TAlignTest* %this) nounwind align 2 {
entry:
; ARM: @test6
; THUMB: @test6
  %0 = alloca %class.TAlignTest*, align 4
  store %class.TAlignTest* %this, %class.TAlignTest** %0, align 4
  %1 = load %class.TAlignTest** %0
  %2 = getelementptr inbounds %class.TAlignTest* %1, i32 0, i32 1
  %3 = load float* %2, align 1
  %4 = fcmp une float %3, 0.000000e+00
; ARM: ldr r0, [r0, #2]
; ARM: vmov s0, r0
; ARM: vcmpe.f32 s0, #0
; THUMB: ldr.w r0, [r0, #2]
; THUMB: vmov s0, r0
; THUMB: vcmpe.f32 s0, #0
  ret i1 %4
}

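; urem by a power of two should fold to a bitwise and with (divisor - 1),
; i.e. an 'and ..., #31' here.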
; ARM: @urem_fold
; THUMB: @urem_fold
; ARM: and r0, r0, #31
; THUMB: and r0, r0, #31
define i32 @urem_fold(i32 %a) nounwind {
  %rem = urem i32 %a, 32
  ret i32 %rem
}

define i32 @test7() noreturn nounwind {
entry:
; ARM: @test7
; THUMB: @test7
; ARM: trap
; THUMB: trap
  tail call void @llvm.trap( )
  unreachable
}

declare void @llvm.trap() nounwind

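; With -arm-strict-align the backend must not emit unaligned word or halfword
; accesses, so the align-1 loads and stores below are expanded into sequences
; of byte-sized ldrb/strb operations.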
define void @unaligned_i16_store(i16 %x, i16* %y) nounwind {
entry:
; ARM-STRICT-ALIGN: @unaligned_i16_store
; ARM-STRICT-ALIGN: strb
; ARM-STRICT-ALIGN: strb

; THUMB-STRICT-ALIGN: @unaligned_i16_store
; THUMB-STRICT-ALIGN: strb
; THUMB-STRICT-ALIGN: strb

  store i16 %x, i16* %y, align 1
  ret void
}

define i16 @unaligned_i16_load(i16* %x) nounwind {
entry:
; ARM-STRICT-ALIGN: @unaligned_i16_load
; ARM-STRICT-ALIGN: ldrb
; ARM-STRICT-ALIGN: ldrb

; THUMB-STRICT-ALIGN: @unaligned_i16_load
; THUMB-STRICT-ALIGN: ldrb
; THUMB-STRICT-ALIGN: ldrb

  %0 = load i16* %x, align 1
  ret i16 %0
}

define void @unaligned_i32_store(i32 %x, i32* %y) nounwind {
entry:
; ARM-STRICT-ALIGN: @unaligned_i32_store
; ARM-STRICT-ALIGN: strb
; ARM-STRICT-ALIGN: strb
; ARM-STRICT-ALIGN: strb
; ARM-STRICT-ALIGN: strb

; THUMB-STRICT-ALIGN: @unaligned_i32_store
; THUMB-STRICT-ALIGN: strb
; THUMB-STRICT-ALIGN: strb
; THUMB-STRICT-ALIGN: strb
; THUMB-STRICT-ALIGN: strb

  store i32 %x, i32* %y, align 1
  ret void
}

define i32 @unaligned_i32_load(i32* %x) nounwind {
entry:
; ARM-STRICT-ALIGN: @unaligned_i32_load
; ARM-STRICT-ALIGN: ldrb
; ARM-STRICT-ALIGN: ldrb
; ARM-STRICT-ALIGN: ldrb
; ARM-STRICT-ALIGN: ldrb

; THUMB-STRICT-ALIGN: @unaligned_i32_load
; THUMB-STRICT-ALIGN: ldrb
; THUMB-STRICT-ALIGN: ldrb
; THUMB-STRICT-ALIGN: ldrb
; THUMB-STRICT-ALIGN: ldrb

  %0 = load i32* %x, align 1
  ret i32 %0
}