Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame^] | 1 | ; RUN: llc -mtriple=aarch64-none-linux-gnu < %s | FileCheck %s --check-prefix=CHECK |
Tim Northover | e0e3aef | 2013-01-31 12:12:40 +0000 | [diff] [blame] | 2 | |
| 3 | ; First, a simple example from Clang. The registers could plausibly be |
| 4 | ; different, but probably won't be. |
| 5 | |
| 6 | %struct.foo = type { i8, [2 x i8], i8 } |
| 7 | |
; Clang's lowering of a struct-bitfield store: shift %n into place, mask to the
; field width, and merge with the kept bits of the coerced [1 x i64] argument.
; Should select a single BFI of 4 bits at bit position 3.
define [1 x i64] @from_clang([1 x i64] %f.coerce, i32 %n) nounwind readnone {
; CHECK-LABEL: from_clang:
; CHECK: bfi {{w[0-9]+}}, {{w[0-9]+}}, #3, #4

entry:
  %f.coerce.fca.0.extract = extractvalue [1 x i64] %f.coerce, 0
  %tmp.sroa.0.0.extract.trunc = trunc i64 %f.coerce.fca.0.extract to i32
  %bf.value = shl i32 %n, 3
  %0 = and i32 %bf.value, 120 ; insert mask = 0x78 (bits 3-6)
  %f.sroa.0.0.insert.ext.masked = and i32 %tmp.sroa.0.0.extract.trunc, 135 ; keep mask = 0x87
  %1 = or i32 %f.sroa.0.0.insert.ext.masked, %0
  %f.sroa.0.0.extract.trunc = zext i32 %1 to i64
  %tmp1.sroa.1.1.insert.insert = and i64 %f.coerce.fca.0.extract, 4294967040 ; = 0xffffff00
  %tmp1.sroa.0.0.insert.insert = or i64 %f.sroa.0.0.extract.trunc, %tmp1.sroa.1.1.insert.insert
  %.fca.0.insert = insertvalue [1 x i64] undef, i64 %tmp1.sroa.0.0.insert.insert, 0
  ret [1 x i64] %.fca.0.insert
}
| 25 | |
; Keep-mask (0x83ffffff) and insert-mask (0x7c000000) are exact complements over
; bits 26-30, so the whole merge collapses to one BFI of 5 bits at bit 26.
define void @test_whole32(i32* %existing, i32* %new) {
; CHECK-LABEL: test_whole32:

; CHECK: bfi {{w[0-9]+}}, {{w[0-9]+}}, #26, #5

  %oldval = load volatile i32* %existing
  %oldval_keep = and i32 %oldval, 2214592511 ; =0x83ffffff

  %newval = load volatile i32* %new
  %newval_shifted = shl i32 %newval, 26
  %newval_masked = and i32 %newval_shifted, 2080374784 ; = 0x7c000000

  %combined = or i32 %oldval_keep, %newval_masked
  store volatile i32 %combined, i32* %existing

  ret void
}
| 43 | |
; 64-bit version: complementary masks over bits 26-39 become a single BFI of
; 14 bits at bit 26, with no residual AND left in the output.
define void @test_whole64(i64* %existing, i64* %new) {
; CHECK-LABEL: test_whole64:
; CHECK: bfi {{x[0-9]+}}, {{x[0-9]+}}, #26, #14
; CHECK-NOT: and
; CHECK: ret

  %oldval = load volatile i64* %existing
  %oldval_keep = and i64 %oldval, 18446742974265032703 ; = 0xffffff0003ffffffL

  %newval = load volatile i64* %new
  %newval_shifted = shl i64 %newval, 26
  %newval_masked = and i64 %newval_shifted, 1099444518912 ; = 0xfffc000000

  %combined = or i64 %oldval_keep, %newval_masked
  store volatile i64 %combined, i64* %existing

  ret void
}
| 62 | |
; Inserting an unshifted low field (bits 0-15) should use BFXIL, the
; insert-at-bit-0 alias of BFM, rather than BFI.
define void @test_whole32_from64(i64* %existing, i64* %new) {
; CHECK-LABEL: test_whole32_from64:


; CHECK: bfxil {{x[0-9]+}}, {{x[0-9]+}}, #0, #16

; CHECK: ret

  %oldval = load volatile i64* %existing
  %oldval_keep = and i64 %oldval, 4294901760 ; = 0xffff0000

  %newval = load volatile i64* %new
  %newval_masked = and i64 %newval, 65535 ; = 0xffff

  %combined = or i64 %oldval_keep, %newval_masked
  store volatile i64 %combined, i64* %existing

  ret void
}
| 82 | |
; The keep-mask (0x87) is NOT the complement of the insert-mask (0x78): bits
; outside both must be cleared, so an explicit AND survives alongside the BFI.
define void @test_32bit_masked(i32 *%existing, i32 *%new) {
; CHECK-LABEL: test_32bit_masked:

; CHECK: and
; CHECK: bfi [[INSERT:w[0-9]+]], {{w[0-9]+}}, #3, #4

  %oldval = load volatile i32* %existing
  %oldval_keep = and i32 %oldval, 135 ; = 0x87

  %newval = load volatile i32* %new
  %newval_shifted = shl i32 %newval, 3
  %newval_masked = and i32 %newval_shifted, 120 ; = 0x78

  %combined = or i32 %oldval_keep, %newval_masked
  store volatile i32 %combined, i32* %existing

  ret void
}
| 101 | |
; 64-bit equivalent of test_32bit_masked: non-complementary masks, so an AND
; plus a BFI of 8 bits at bit 40 is expected.
define void @test_64bit_masked(i64 *%existing, i64 *%new) {
; CHECK-LABEL: test_64bit_masked:
; CHECK: and
; CHECK: bfi [[INSERT:x[0-9]+]], {{x[0-9]+}}, #40, #8

  %oldval = load volatile i64* %existing
  %oldval_keep = and i64 %oldval, 1095216660480 ; = 0xff_0000_0000

  %newval = load volatile i64* %new
  %newval_shifted = shl i64 %newval, 40
  %newval_masked = and i64 %newval_shifted, 280375465082880 ; = 0xff00_0000_0000

  %combined = or i64 %newval_masked, %oldval_keep
  store volatile i64 %combined, i64* %existing

  ret void
}
| 119 | |
; Mask is too complicated for literal ANDwwi, make sure other avenues are tried.
; (0x287 is not encodable as an AArch64 logical immediate; BFI should still fire.)
define void @test_32bit_complexmask(i32 *%existing, i32 *%new) {
; CHECK-LABEL: test_32bit_complexmask:

; CHECK: and
; CHECK: bfi {{w[0-9]+}}, {{w[0-9]+}}, #3, #4

  %oldval = load volatile i32* %existing
  %oldval_keep = and i32 %oldval, 647 ; = 0x287

  %newval = load volatile i32* %new
  %newval_shifted = shl i32 %newval, 3
  %newval_masked = and i32 %newval_shifted, 120 ; = 0x78

  %combined = or i32 %oldval_keep, %newval_masked
  store volatile i32 %combined, i32* %existing

  ret void
}
| 139 | |
; Neither mask is a contiguous set of 1s. BFI can't be used
; (insert-mask 0x278 has a gap between bits 6 and 9).
define void @test_32bit_badmask(i32 *%existing, i32 *%new) {
; CHECK-LABEL: test_32bit_badmask:
; CHECK-NOT: bfi
; CHECK-NOT: bfm
; CHECK: ret

  %oldval = load volatile i32* %existing
  %oldval_keep = and i32 %oldval, 135 ; = 0x87

  %newval = load volatile i32* %new
  %newval_shifted = shl i32 %newval, 3
  %newval_masked = and i32 %newval_shifted, 632 ; = 0x278

  %combined = or i32 %oldval_keep, %newval_masked
  store volatile i32 %combined, i32* %existing

  ret void
}
| 159 | |
; Ditto: 64-bit variant with a non-contiguous insert-mask, so no BFI/BFM.
define void @test_64bit_badmask(i64 *%existing, i64 *%new) {
; CHECK-LABEL: test_64bit_badmask:
; CHECK-NOT: bfi
; CHECK-NOT: bfm
; CHECK: ret

  %oldval = load volatile i64* %existing
  %oldval_keep = and i64 %oldval, 135 ; = 0x87

  %newval = load volatile i64* %new
  %newval_shifted = shl i64 %newval, 3
  %newval_masked = and i64 %newval_shifted, 664 ; = 0x298

  %combined = or i64 %oldval_keep, %newval_masked
  store volatile i64 %combined, i64* %existing

  ret void
}
| 179 | |
; Bitfield insert where there's a left-over shr needed at the beginning
; (e.g. result of str.bf1 = str.bf2)
; shl #12 followed by the mask at bits 26-30 means the field comes from bits
; 14-18 of %newval: an LSR #14 must feed the BFI at #26.
define void @test_32bit_with_shr(i32* %existing, i32* %new) {
; CHECK-LABEL: test_32bit_with_shr:

  %oldval = load volatile i32* %existing
  %oldval_keep = and i32 %oldval, 2214592511 ; =0x83ffffff

  %newval = load i32* %new
  %newval_shifted = shl i32 %newval, 12
  %newval_masked = and i32 %newval_shifted, 2080374784 ; = 0x7c000000

  %combined = or i32 %oldval_keep, %newval_masked
  store volatile i32 %combined, i32* %existing
; CHECK: lsr [[BIT:w[0-9]+]], {{w[0-9]+}}, #14
; CHECK: bfi {{w[0-9]+}}, [[BIT]], #26, #5

  ret void
}