; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt < %s -instsimplify -S | FileCheck %s
; RUN: opt < %s -passes=instsimplify -S | FileCheck %s

declare {i8, i1} @llvm.uadd.with.overflow.i8(i8 %a, i8 %b)
declare {i8, i1} @llvm.sadd.with.overflow.i8(i8 %a, i8 %b)
declare {i8, i1} @llvm.usub.with.overflow.i8(i8 %a, i8 %b)
declare {i8, i1} @llvm.ssub.with.overflow.i8(i8 %a, i8 %b)
declare {i8, i1} @llvm.umul.with.overflow.i8(i8 %a, i8 %b)
declare {i8, i1} @llvm.smul.with.overflow.i8(i8 %a, i8 %b)

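; With constant operands, the *.with.overflow intrinsics are fully constant-folded.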
define i1 @test_uadd1() {
; CHECK-LABEL: @test_uadd1(
; CHECK-NEXT: ret i1 true
;
%x = call {i8, i1} @llvm.uadd.with.overflow.i8(i8 254, i8 3)
%overflow = extractvalue {i8, i1} %x, 1
ret i1 %overflow
}

define i8 @test_uadd2() {
; CHECK-LABEL: @test_uadd2(
; CHECK-NEXT: ret i8 42
;
%x = call {i8, i1} @llvm.uadd.with.overflow.i8(i8 254, i8 44)
%result = extractvalue {i8, i1} %x, 0
ret i8 %result
}

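; An undef operand folds the whole {result, overflow} aggregate to undef.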
define {i8, i1} @test_uadd3(i8 %v) {
; CHECK-LABEL: @test_uadd3(
; CHECK-NEXT: ret { i8, i1 } undef
;
%result = call {i8, i1} @llvm.uadd.with.overflow.i8(i8 %v, i8 undef)
ret {i8, i1} %result
}

define {i8, i1} @test_uadd4(i8 %v) {
; CHECK-LABEL: @test_uadd4(
; CHECK-NEXT: ret { i8, i1 } undef
;
%result = call {i8, i1} @llvm.uadd.with.overflow.i8(i8 undef, i8 %v)
ret {i8, i1} %result
}

define i1 @test_sadd1() {
; CHECK-LABEL: @test_sadd1(
; CHECK-NEXT: ret i1 true
;
%x = call {i8, i1} @llvm.sadd.with.overflow.i8(i8 126, i8 3)
%overflow = extractvalue {i8, i1} %x, 1
ret i1 %overflow
}

define i8 @test_sadd2() {
; CHECK-LABEL: @test_sadd2(
; CHECK-NEXT: ret i8 -86
;
%x = call {i8, i1} @llvm.sadd.with.overflow.i8(i8 126, i8 44)
%result = extractvalue {i8, i1} %x, 0
ret i8 %result
}

define {i8, i1} @test_sadd3(i8 %v) {
; CHECK-LABEL: @test_sadd3(
; CHECK-NEXT: ret { i8, i1 } undef
;
%result = call {i8, i1} @llvm.sadd.with.overflow.i8(i8 %v, i8 undef)
ret {i8, i1} %result
}

define {i8, i1} @test_sadd4(i8 %v) {
; CHECK-LABEL: @test_sadd4(
; CHECK-NEXT: ret { i8, i1 } undef
;
%result = call {i8, i1} @llvm.sadd.with.overflow.i8(i8 undef, i8 %v)
ret {i8, i1} %result
}

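; Subtracting a value from itself never overflows, so usub/ssub fold to {0, false} (zeroinitializer).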
define {i8, i1} @test_usub1(i8 %V) {
; CHECK-LABEL: @test_usub1(
; CHECK-NEXT: ret { i8, i1 } zeroinitializer
;
%x = call {i8, i1} @llvm.usub.with.overflow.i8(i8 %V, i8 %V)
ret {i8, i1} %x
}

define {i8, i1} @test_usub2(i8 %V) {
; CHECK-LABEL: @test_usub2(
; CHECK-NEXT: ret { i8, i1 } undef
;
%x = call {i8, i1} @llvm.usub.with.overflow.i8(i8 %V, i8 undef)
ret {i8, i1} %x
}

define {i8, i1} @test_usub3(i8 %V) {
; CHECK-LABEL: @test_usub3(
; CHECK-NEXT: ret { i8, i1 } undef
;
%x = call {i8, i1} @llvm.usub.with.overflow.i8(i8 undef, i8 %V)
ret {i8, i1} %x
}

define {i8, i1} @test_ssub1(i8 %V) {
; CHECK-LABEL: @test_ssub1(
; CHECK-NEXT: ret { i8, i1 } zeroinitializer
;
%x = call {i8, i1} @llvm.ssub.with.overflow.i8(i8 %V, i8 %V)
ret {i8, i1} %x
}

define {i8, i1} @test_ssub2(i8 %V) {
; CHECK-LABEL: @test_ssub2(
; CHECK-NEXT: ret { i8, i1 } undef
;
%x = call {i8, i1} @llvm.ssub.with.overflow.i8(i8 %V, i8 undef)
ret {i8, i1} %x
}

define {i8, i1} @test_ssub3(i8 %V) {
; CHECK-LABEL: @test_ssub3(
; CHECK-NEXT: ret { i8, i1 } undef
;
%x = call {i8, i1} @llvm.ssub.with.overflow.i8(i8 undef, i8 %V)
ret {i8, i1} %x
}

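; Multiplying by zero folds umul/smul to {0, false}; an undef operand folds the same way.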
define {i8, i1} @test_umul1(i8 %V) {
; CHECK-LABEL: @test_umul1(
; CHECK-NEXT: ret { i8, i1 } zeroinitializer
;
%x = call {i8, i1} @llvm.umul.with.overflow.i8(i8 %V, i8 0)
ret {i8, i1} %x
}

define {i8, i1} @test_umul2(i8 %V) {
; CHECK-LABEL: @test_umul2(
; CHECK-NEXT: ret { i8, i1 } zeroinitializer
;
%x = call {i8, i1} @llvm.umul.with.overflow.i8(i8 %V, i8 undef)
ret {i8, i1} %x
}

define {i8, i1} @test_umul3(i8 %V) {
; CHECK-LABEL: @test_umul3(
; CHECK-NEXT: ret { i8, i1 } zeroinitializer
;
%x = call {i8, i1} @llvm.umul.with.overflow.i8(i8 0, i8 %V)
ret {i8, i1} %x
}

define {i8, i1} @test_umul4(i8 %V) {
; CHECK-LABEL: @test_umul4(
; CHECK-NEXT: ret { i8, i1 } zeroinitializer
;
%x = call {i8, i1} @llvm.umul.with.overflow.i8(i8 undef, i8 %V)
ret {i8, i1} %x
}

define {i8, i1} @test_smul1(i8 %V) {
; CHECK-LABEL: @test_smul1(
; CHECK-NEXT: ret { i8, i1 } zeroinitializer
;
%x = call {i8, i1} @llvm.smul.with.overflow.i8(i8 %V, i8 0)
ret {i8, i1} %x
}

define {i8, i1} @test_smul2(i8 %V) {
; CHECK-LABEL: @test_smul2(
; CHECK-NEXT: ret { i8, i1 } zeroinitializer
;
%x = call {i8, i1} @llvm.smul.with.overflow.i8(i8 %V, i8 undef)
ret {i8, i1} %x
}

define {i8, i1} @test_smul3(i8 %V) {
; CHECK-LABEL: @test_smul3(
; CHECK-NEXT: ret { i8, i1 } zeroinitializer
;
%x = call {i8, i1} @llvm.smul.with.overflow.i8(i8 0, i8 %V)
ret {i8, i1} %x
}

define {i8, i1} @test_smul4(i8 %V) {
; CHECK-LABEL: @test_smul4(
; CHECK-NEXT: ret { i8, i1 } zeroinitializer
;
%x = call {i8, i1} @llvm.smul.with.overflow.i8(i8 undef, i8 %V)
ret {i8, i1} %x
}

; Test a non-intrinsic that we know about as a library call.
declare float @fabs(float %x)

define float @test_fabs_libcall() {
; CHECK-LABEL: @test_fabs_libcall(
; CHECK-NEXT: [[X:%.*]] = call float @fabs(float -4.200000e+01)
; CHECK-NEXT: ret float 4.200000e+01
;

%x = call float @fabs(float -42.0)
; This is still a real function call, so instsimplify won't nuke it -- other
; passes have to do that.

ret float %x
}


declare float @llvm.fabs.f32(float) nounwind readnone
declare float @llvm.floor.f32(float) nounwind readnone
declare float @llvm.ceil.f32(float) nounwind readnone
declare float @llvm.trunc.f32(float) nounwind readnone
declare float @llvm.rint.f32(float) nounwind readnone
declare float @llvm.nearbyint.f32(float) nounwind readnone
declare float @llvm.canonicalize.f32(float) nounwind readnone

; Test idempotent intrinsics
define float @test_idempotence(float %a) {
; CHECK-LABEL: @test_idempotence(
; CHECK-NEXT: [[A0:%.*]] = call float @llvm.fabs.f32(float [[A:%.*]])
; CHECK-NEXT: [[B0:%.*]] = call float @llvm.floor.f32(float [[A]])
; CHECK-NEXT: [[C0:%.*]] = call float @llvm.ceil.f32(float [[A]])
; CHECK-NEXT: [[D0:%.*]] = call float @llvm.trunc.f32(float [[A]])
; CHECK-NEXT: [[E0:%.*]] = call float @llvm.rint.f32(float [[A]])
; CHECK-NEXT: [[F0:%.*]] = call float @llvm.nearbyint.f32(float [[A]])
; CHECK-NEXT: [[G0:%.*]] = call float @llvm.canonicalize.f32(float [[A]])
; CHECK-NEXT: [[R0:%.*]] = fadd float [[A0]], [[B0]]
; CHECK-NEXT: [[R1:%.*]] = fadd float [[R0]], [[C0]]
; CHECK-NEXT: [[R2:%.*]] = fadd float [[R1]], [[D0]]
; CHECK-NEXT: [[R3:%.*]] = fadd float [[R2]], [[E0]]
; CHECK-NEXT: [[R4:%.*]] = fadd float [[R3]], [[F0]]
; CHECK-NEXT: [[R5:%.*]] = fadd float [[R4]], [[G0]]
; CHECK-NEXT: ret float [[R5]]
;

%a0 = call float @llvm.fabs.f32(float %a)
%a1 = call float @llvm.fabs.f32(float %a0)

%b0 = call float @llvm.floor.f32(float %a)
%b1 = call float @llvm.floor.f32(float %b0)

%c0 = call float @llvm.ceil.f32(float %a)
%c1 = call float @llvm.ceil.f32(float %c0)

%d0 = call float @llvm.trunc.f32(float %a)
%d1 = call float @llvm.trunc.f32(float %d0)

%e0 = call float @llvm.rint.f32(float %a)
%e1 = call float @llvm.rint.f32(float %e0)

%f0 = call float @llvm.nearbyint.f32(float %a)
%f1 = call float @llvm.nearbyint.f32(float %f0)

%g0 = call float @llvm.canonicalize.f32(float %a)
%g1 = call float @llvm.canonicalize.f32(float %g0)

%r0 = fadd float %a1, %b1
%r1 = fadd float %r0, %c1
%r2 = fadd float %r1, %d1
%r3 = fadd float %r2, %e1
%r4 = fadd float %r3, %f1
%r5 = fadd float %r4, %g1

ret float %r5
}

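; @_Znwm (operator new) is declared nonnull, so the null check folds to false; cleaning up the dead branch is left to other passes.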
define i8* @operator_new() {
; CHECK-LABEL: @operator_new(
; CHECK-NEXT: entry:
; CHECK-NEXT: [[CALL:%.*]] = tail call noalias i8* @_Znwm(i64 8)
; CHECK-NEXT: br i1 false, label [[CAST_END:%.*]], label [[CAST_NOTNULL:%.*]]
; CHECK: cast.notnull:
; CHECK-NEXT: [[ADD_PTR:%.*]] = getelementptr inbounds i8, i8* [[CALL]], i64 4
; CHECK-NEXT: br label [[CAST_END]]
; CHECK: cast.end:
; CHECK-NEXT: [[CAST_RESULT:%.*]] = phi i8* [ [[ADD_PTR]], [[CAST_NOTNULL]] ], [ null, [[ENTRY:%.*]] ]
; CHECK-NEXT: ret i8* [[CAST_RESULT]]
;
entry:
%call = tail call noalias i8* @_Znwm(i64 8)
%cmp = icmp eq i8* %call, null
br i1 %cmp, label %cast.end, label %cast.notnull

cast.notnull: ; preds = %entry
%add.ptr = getelementptr inbounds i8, i8* %call, i64 4
br label %cast.end

cast.end: ; preds = %cast.notnull, %entry
%cast.result = phi i8* [ %add.ptr, %cast.notnull ], [ null, %entry ]
ret i8* %cast.result

}

declare nonnull noalias i8* @_Znwm(i64)

%"struct.std::nothrow_t" = type { i8 }
@_ZSt7nothrow = external global %"struct.std::nothrow_t"

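; The nothrow variant of operator new is not declared nonnull, so the null check is not folded away.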
define i8* @operator_new_nothrow_t() {
; CHECK-LABEL: @operator_new_nothrow_t(
; CHECK-NEXT: entry:
; CHECK-NEXT: [[CALL:%.*]] = tail call noalias i8* @_ZnamRKSt9nothrow_t(i64 8, %"struct.std::nothrow_t"* @_ZSt7nothrow)
; CHECK-NEXT: [[CMP:%.*]] = icmp eq i8* [[CALL]], null
; CHECK-NEXT: br i1 [[CMP]], label [[CAST_END:%.*]], label [[CAST_NOTNULL:%.*]]
; CHECK: cast.notnull:
; CHECK-NEXT: [[ADD_PTR:%.*]] = getelementptr inbounds i8, i8* [[CALL]], i64 4
; CHECK-NEXT: br label [[CAST_END]]
; CHECK: cast.end:
; CHECK-NEXT: [[CAST_RESULT:%.*]] = phi i8* [ [[ADD_PTR]], [[CAST_NOTNULL]] ], [ null, [[ENTRY:%.*]] ]
; CHECK-NEXT: ret i8* [[CAST_RESULT]]
;
entry:
%call = tail call noalias i8* @_ZnamRKSt9nothrow_t(i64 8, %"struct.std::nothrow_t"* @_ZSt7nothrow)
%cmp = icmp eq i8* %call, null
br i1 %cmp, label %cast.end, label %cast.notnull

cast.notnull: ; preds = %entry
%add.ptr = getelementptr inbounds i8, i8* %call, i64 4
br label %cast.end

cast.end: ; preds = %cast.notnull, %entry
%cast.result = phi i8* [ %add.ptr, %cast.notnull ], [ null, %entry ]
ret i8* %cast.result

}

declare i8* @_ZnamRKSt9nothrow_t(i64, %"struct.std::nothrow_t"*) nounwind

define i8* @malloc_can_return_null() {
; CHECK-LABEL: @malloc_can_return_null(
; CHECK-NEXT: entry:
; CHECK-NEXT: [[CALL:%.*]] = tail call noalias i8* @malloc(i64 8)
; CHECK-NEXT: [[CMP:%.*]] = icmp eq i8* [[CALL]], null
; CHECK-NEXT: br i1 [[CMP]], label [[CAST_END:%.*]], label [[CAST_NOTNULL:%.*]]
; CHECK: cast.notnull:
; CHECK-NEXT: [[ADD_PTR:%.*]] = getelementptr inbounds i8, i8* [[CALL]], i64 4
; CHECK-NEXT: br label [[CAST_END]]
; CHECK: cast.end:
; CHECK-NEXT: [[CAST_RESULT:%.*]] = phi i8* [ [[ADD_PTR]], [[CAST_NOTNULL]] ], [ null, [[ENTRY:%.*]] ]
; CHECK-NEXT: ret i8* [[CAST_RESULT]]
;
entry:
%call = tail call noalias i8* @malloc(i64 8)
%cmp = icmp eq i8* %call, null
br i1 %cmp, label %cast.end, label %cast.notnull

cast.notnull: ; preds = %entry
%add.ptr = getelementptr inbounds i8, i8* %call, i64 4
br label %cast.end

cast.end: ; preds = %cast.notnull, %entry
%cast.result = phi i8* [ %add.ptr, %cast.notnull ], [ null, %entry ]
ret i8* %cast.result

}

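; Calling a null or undef function pointer is undefined, so the returned value simplifies to undef; the call itself is left for other passes to remove.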
define i32 @call_null() {
; CHECK-LABEL: @call_null(
; CHECK-NEXT: entry:
; CHECK-NEXT: [[CALL:%.*]] = call i32 null()
; CHECK-NEXT: ret i32 undef
;
entry:
%call = call i32 null()
ret i32 %call
}

define i32 @call_undef() {
; CHECK-LABEL: @call_undef(
; CHECK-NEXT: entry:
; CHECK-NEXT: [[CALL:%.*]] = call i32 undef()
; CHECK-NEXT: ret i32 undef
;
entry:
%call = call i32 undef()
ret i32 %call
}

@GV = private constant [8 x i32] [i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49]

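; The lanes enabled by the mask read known constants from @GV; the disabled lanes take the undef passthru, so the whole load constant-folds.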
define <8 x i32> @partial_masked_load() {
; CHECK-LABEL: @partial_masked_load(
; CHECK-NEXT: ret <8 x i32> <i32 undef, i32 undef, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47>
;
%masked.load = call <8 x i32> @llvm.masked.load.v8i32.p0v8i32(<8 x i32>* bitcast (i32* getelementptr ([8 x i32], [8 x i32]* @GV, i64 0, i64 -2) to <8 x i32>*), i32 4, <8 x i1> <i1 false, i1 false, i1 true, i1 true, i1 true, i1 true, i1 true, i1 true>, <8 x i32> undef)
ret <8 x i32> %masked.load
}

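; With an undef mask, the masked load simplifies to its passthru operand.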
define <8 x i32> @masked_load_undef_mask(<8 x i32>* %V) {
; CHECK-LABEL: @masked_load_undef_mask(
; CHECK-NEXT: ret <8 x i32> <i32 1, i32 0, i32 1, i32 0, i32 1, i32 0, i32 1, i32 0>
;
%masked.load = call <8 x i32> @llvm.masked.load.v8i32.p0v8i32(<8 x i32>* %V, i32 4, <8 x i1> undef, <8 x i32> <i32 1, i32 0, i32 1, i32 0, i32 1, i32 0, i32 1, i32 0>)
ret <8 x i32> %masked.load
}

declare noalias i8* @malloc(i64)

declare <8 x i32> @llvm.masked.load.v8i32.p0v8i32(<8 x i32>*, i32, <8 x i1>, <8 x i32>)

declare double @llvm.powi.f64(double, i32)
declare <2 x double> @llvm.powi.v2f64(<2 x double>, i32)

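; powi with constant arguments is constant-folded: 3^2 = 9, and <3^2, 5^2> = <9, 25> for the vector case.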
define double @constant_fold_powi() {
; CHECK-LABEL: @constant_fold_powi(
; CHECK-NEXT: ret double 9.000000e+00
;
%t0 = call double @llvm.powi.f64(double 3.00000e+00, i32 2)
ret double %t0
}

define <2 x double> @constant_fold_powi_vec() {
; CHECK-LABEL: @constant_fold_powi_vec(
; CHECK-NEXT: ret <2 x double> <double 9.000000e+00, double 2.500000e+01>
;
%t0 = call <2 x double> @llvm.powi.v2f64(<2 x double> <double 3.00000e+00, double 5.00000e+00>, i32 2)
ret <2 x double> %t0
}

declare i8 @llvm.fshl.i8(i8, i8, i8)
declare i9 @llvm.fshr.i9(i9, i9, i9)
declare <2 x i7> @llvm.fshl.v2i7(<2 x i7>, <2 x i7>, <2 x i7>)
declare <2 x i8> @llvm.fshr.v2i8(<2 x i8>, <2 x i8>, <2 x i8>)

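; A shift amount that is zero modulo the bitwidth performs no rotation: fshl returns its first operand and fshr returns its second.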
define i8 @fshl_no_shift(i8 %x, i8 %y) {
; CHECK-LABEL: @fshl_no_shift(
; CHECK-NEXT: ret i8 [[X:%.*]]
;
%z = call i8 @llvm.fshl.i8(i8 %x, i8 %y, i8 0)
ret i8 %z
}

define i9 @fshr_no_shift(i9 %x, i9 %y) {
; CHECK-LABEL: @fshr_no_shift(
; CHECK-NEXT: ret i9 [[Y:%.*]]
;
%z = call i9 @llvm.fshr.i9(i9 %x, i9 %y, i9 0)
ret i9 %z
}

define i8 @fshl_no_shift_modulo_bitwidth(i8 %x, i8 %y) {
; CHECK-LABEL: @fshl_no_shift_modulo_bitwidth(
; CHECK-NEXT: ret i8 [[X:%.*]]
;
%z = call i8 @llvm.fshl.i8(i8 %x, i8 %y, i8 40)
ret i8 %z
}

define i9 @fshr_no_shift_modulo_bitwidth(i9 %x, i9 %y) {
; CHECK-LABEL: @fshr_no_shift_modulo_bitwidth(
; CHECK-NEXT: ret i9 [[Y:%.*]]
;
%z = call i9 @llvm.fshr.i9(i9 %x, i9 %y, i9 189)
ret i9 %z
}

define <2 x i7> @fshl_no_shift_modulo_bitwidth_splat(<2 x i7> %x, <2 x i7> %y) {
; CHECK-LABEL: @fshl_no_shift_modulo_bitwidth_splat(
; CHECK-NEXT: ret <2 x i7> [[X:%.*]]
;
%z = call <2 x i7> @llvm.fshl.v2i7(<2 x i7> %x, <2 x i7> %y, <2 x i7> <i7 21, i7 21>)
ret <2 x i7> %z
}

define <2 x i8> @fshr_no_shift_modulo_bitwidth_splat(<2 x i8> %x, <2 x i8> %y) {
; CHECK-LABEL: @fshr_no_shift_modulo_bitwidth_splat(
; CHECK-NEXT: ret <2 x i8> [[Y:%.*]]
;
%z = call <2 x i8> @llvm.fshr.v2i8(<2 x i8> %x, <2 x i8> %y, <2 x i8> <i8 72, i8 72>)
ret <2 x i8> %z
}

; When the shift amount is 0, fshl returns its 1st parameter (x), so the guard is not needed.

define i8 @fshl_zero_shift_guard(i8 %x, i8 %y, i8 %sh) {
; CHECK-LABEL: @fshl_zero_shift_guard(
; CHECK-NEXT: [[F:%.*]] = call i8 @llvm.fshl.i8(i8 [[X:%.*]], i8 [[Y:%.*]], i8 [[SH:%.*]])
; CHECK-NEXT: ret i8 [[F]]
;
%c = icmp eq i8 %sh, 0
%f = call i8 @llvm.fshl.i8(i8 %x, i8 %y, i8 %sh)
%s = select i1 %c, i8 %x, i8 %f
ret i8 %s
}

; When the shift amount is 0, fshl returns its 1st parameter (x), so the guard is not needed.

define i8 @fshl_zero_shift_guard_swapped(i8 %x, i8 %y, i8 %sh) {
; CHECK-LABEL: @fshl_zero_shift_guard_swapped(
; CHECK-NEXT: [[F:%.*]] = call i8 @llvm.fshl.i8(i8 [[X:%.*]], i8 [[Y:%.*]], i8 [[SH:%.*]])
; CHECK-NEXT: ret i8 [[F]]
;
%c = icmp ne i8 %sh, 0
%f = call i8 @llvm.fshl.i8(i8 %x, i8 %y, i8 %sh)
%s = select i1 %c, i8 %f, i8 %x
ret i8 %s
}

; When the shift amount is 0, fshl returns its 1st parameter (x), so everything is deleted.

define i8 @fshl_zero_shift_guard_inverted(i8 %x, i8 %y, i8 %sh) {
; CHECK-LABEL: @fshl_zero_shift_guard_inverted(
; CHECK-NEXT: ret i8 [[X:%.*]]
;
%c = icmp eq i8 %sh, 0
%f = call i8 @llvm.fshl.i8(i8 %x, i8 %y, i8 %sh)
%s = select i1 %c, i8 %f, i8 %x
ret i8 %s
}

; When the shift amount is 0, fshl returns its 1st parameter (x), so everything is deleted.

define i8 @fshl_zero_shift_guard_inverted_swapped(i8 %x, i8 %y, i8 %sh) {
; CHECK-LABEL: @fshl_zero_shift_guard_inverted_swapped(
; CHECK-NEXT: ret i8 [[X:%.*]]
;
%c = icmp ne i8 %sh, 0
%f = call i8 @llvm.fshl.i8(i8 %x, i8 %y, i8 %sh)
%s = select i1 %c, i8 %x, i8 %f
ret i8 %s
}

; When the shift amount is 0, fshr returns its 2nd parameter (y), so the guard is not needed.

define i9 @fshr_zero_shift_guard(i9 %x, i9 %y, i9 %sh) {
; CHECK-LABEL: @fshr_zero_shift_guard(
; CHECK-NEXT: [[F:%.*]] = call i9 @llvm.fshr.i9(i9 [[X:%.*]], i9 [[Y:%.*]], i9 [[SH:%.*]])
; CHECK-NEXT: ret i9 [[F]]
;
%c = icmp eq i9 %sh, 0
%f = call i9 @llvm.fshr.i9(i9 %x, i9 %y, i9 %sh)
%s = select i1 %c, i9 %y, i9 %f
ret i9 %s
}

; When the shift amount is 0, fshr returns its 2nd parameter (y), so the guard is not needed.

define i9 @fshr_zero_shift_guard_swapped(i9 %x, i9 %y, i9 %sh) {
; CHECK-LABEL: @fshr_zero_shift_guard_swapped(
; CHECK-NEXT: [[F:%.*]] = call i9 @llvm.fshr.i9(i9 [[X:%.*]], i9 [[Y:%.*]], i9 [[SH:%.*]])
; CHECK-NEXT: ret i9 [[F]]
;
%c = icmp ne i9 %sh, 0
%f = call i9 @llvm.fshr.i9(i9 %x, i9 %y, i9 %sh)
%s = select i1 %c, i9 %f, i9 %y
ret i9 %s
}

; When the shift amount is 0, fshr returns its 2nd parameter (y), so everything is deleted.

define i9 @fshr_zero_shift_guard_inverted(i9 %x, i9 %y, i9 %sh) {
; CHECK-LABEL: @fshr_zero_shift_guard_inverted(
; CHECK-NEXT: ret i9 [[Y:%.*]]
;
%c = icmp eq i9 %sh, 0
%f = call i9 @llvm.fshr.i9(i9 %x, i9 %y, i9 %sh)
%s = select i1 %c, i9 %f, i9 %y
ret i9 %s
}

; When the shift amount is 0, fshr returns its 2nd parameter (y), so everything is deleted.

define i9 @fshr_zero_shift_guard_inverted_swapped(i9 %x, i9 %y, i9 %sh) {
; CHECK-LABEL: @fshr_zero_shift_guard_inverted_swapped(
; CHECK-NEXT: ret i9 [[Y:%.*]]
;
%c = icmp ne i9 %sh, 0
%f = call i9 @llvm.fshr.i9(i9 %x, i9 %y, i9 %sh)
%s = select i1 %c, i9 %y, i9 %f
ret i9 %s
}

; Negative test - make sure we're matching the correct parameter of fshl.

define i8 @fshl_zero_shift_guard_wrong_select_op(i8 %x, i8 %y, i8 %sh) {
; CHECK-LABEL: @fshl_zero_shift_guard_wrong_select_op(
; CHECK-NEXT: [[C:%.*]] = icmp eq i8 [[SH:%.*]], 0
; CHECK-NEXT: [[F:%.*]] = call i8 @llvm.fshl.i8(i8 [[X:%.*]], i8 [[Y:%.*]], i8 [[SH]])
; CHECK-NEXT: [[S:%.*]] = select i1 [[C]], i8 [[Y]], i8 [[F]]
; CHECK-NEXT: ret i8 [[S]]
;
%c = icmp eq i8 %sh, 0
%f = call i8 @llvm.fshl.i8(i8 %x, i8 %y, i8 %sh)
%s = select i1 %c, i8 %y, i8 %f
ret i8 %s
}

; Vector types work too.

define <2 x i8> @fshr_zero_shift_guard_splat(<2 x i8> %x, <2 x i8> %y, <2 x i8> %sh) {
; CHECK-LABEL: @fshr_zero_shift_guard_splat(
; CHECK-NEXT: [[F:%.*]] = call <2 x i8> @llvm.fshr.v2i8(<2 x i8> [[X:%.*]], <2 x i8> [[Y:%.*]], <2 x i8> [[SH:%.*]])
; CHECK-NEXT: ret <2 x i8> [[F]]
;
%c = icmp eq <2 x i8> %sh, zeroinitializer
%f = call <2 x i8> @llvm.fshr.v2i8(<2 x i8> %x, <2 x i8> %y, <2 x i8> %sh)
%s = select <2 x i1> %c, <2 x i8> %y, <2 x i8> %f
ret <2 x i8> %s
}

; If the first two operands of a funnel shift are undef, the result is undef.

define i8 @fshl_ops_undef(i8 %shamt) {
; CHECK-LABEL: @fshl_ops_undef(
; CHECK-NEXT: ret i8 undef
;
%r = call i8 @llvm.fshl.i8(i8 undef, i8 undef, i8 %shamt)
ret i8 %r
}

define i9 @fshr_ops_undef(i9 %shamt) {
; CHECK-LABEL: @fshr_ops_undef(
; CHECK-NEXT: ret i9 undef
;
%r = call i9 @llvm.fshr.i9(i9 undef, i9 undef, i9 %shamt)
ret i9 %r
}

; If the shift amount is undef, treat it as zero, so fshl returns operand 0 and fshr returns operand 1.

define i8 @fshl_shift_undef(i8 %x, i8 %y) {
; CHECK-LABEL: @fshl_shift_undef(
; CHECK-NEXT: ret i8 [[X:%.*]]
;
%r = call i8 @llvm.fshl.i8(i8 %x, i8 %y, i8 undef)
ret i8 %r
}

define i9 @fshr_shift_undef(i9 %x, i9 %y) {
; CHECK-LABEL: @fshr_shift_undef(
; CHECK-NEXT: ret i9 [[Y:%.*]]
;
%r = call i9 @llvm.fshr.i9(i9 %x, i9 %y, i9 undef)
ret i9 %r
}