; RUN: llc -march=amdgcn -verify-machineinstrs < %s | FileCheck -enable-var-scope -check-prefix=SI -check-prefix=FUNC %s
; RUN: llc -march=amdgcn -mcpu=tonga -mattr=-flat-for-global -verify-machineinstrs < %s | FileCheck -enable-var-scope -check-prefix=SI -check-prefix=FUNC %s

declare i32 @llvm.amdgcn.workitem.id.x() nounwind readnone

; sext(i1) is 0 or -1, so (sext x) == 0 reduces to a compare of the loaded bit.
; NOTE: "FUNC-LABEL" previously lacked its colon, so FileCheck ignored the
; directive and these checks were not scoped to this function; fixed here.
; FUNC-LABEL: {{^}}sextload_i1_to_i32_trunc_cmp_eq_0:
; SI: buffer_load_ubyte [[LOAD:v[0-9]+]]
; SI: v_and_b32_e32 [[TMP:v[0-9]+]], 1, [[LOAD]]
; SI: v_cmp_eq_u32_e32 vcc, 0, [[TMP]]{{$}}
; SI: v_cndmask_b32_e64
; SI: buffer_store_byte
define amdgpu_kernel void @sextload_i1_to_i32_trunc_cmp_eq_0(i1 addrspace(1)* %out, i1 addrspace(1)* %in) nounwind {
  %load = load i1, i1 addrspace(1)* %in
  %ext = sext i1 %load to i32
  %cmp = icmp eq i32 %ext, 0
  store i1 %cmp, i1 addrspace(1)* %out
  ret void
}
| 19 | |
; zext(i1) is 0 or 1; (zext x) == 0 is the inverted bit, currently emitted as
; compare-against-1 followed by a mask negate.
; FIXME: The negate should be inverting the compare.
; FUNC-LABEL: {{^}}zextload_i1_to_i32_trunc_cmp_eq_0:
; SI: buffer_load_ubyte [[LOAD:v[0-9]+]]
; SI: v_and_b32_e32 [[TMP:v[0-9]+]], 1, [[LOAD]]
; SI: v_cmp_eq_u32_e32 vcc, 1, [[TMP]]{{$}}
; SI-NEXT: s_xor_b64 [[NEG:s\[[0-9]+:[0-9]+\]]], vcc, -1
; SI-NEXT: v_cndmask_b32_e64 [[RESULT:v[0-9]+]], 0, 1, [[NEG]]
; SI: buffer_store_byte [[RESULT]]
define amdgpu_kernel void @zextload_i1_to_i32_trunc_cmp_eq_0(i1 addrspace(1)* %out, i1 addrspace(1)* %in) nounwind {
  %bool = load i1, i1 addrspace(1)* %in
  %zext = zext i1 %bool to i32
  %eq = icmp eq i32 %zext, 0
  store i1 %eq, i1 addrspace(1)* %out
  ret void
}
| 35 | |
; sext(i1) is only ever 0 or -1, so == 1 folds to the constant false.
; FUNC-LABEL: {{^}}sextload_i1_to_i32_trunc_cmp_eq_1:
; SI: v_mov_b32_e32 [[RESULT:v[0-9]+]], 0{{$}}
; SI: buffer_store_byte [[RESULT]]
define amdgpu_kernel void @sextload_i1_to_i32_trunc_cmp_eq_1(i1 addrspace(1)* %out, i1 addrspace(1)* %in) nounwind {
  %bool = load i1, i1 addrspace(1)* %in
  %sext = sext i1 %bool to i32
  %eq = icmp eq i32 %sext, 1
  store i1 %eq, i1 addrspace(1)* %out
  ret void
}
| 46 | |
; zext(i1) == 1 is just the loaded bit itself; no compare should be emitted.
; FUNC-LABEL: {{^}}zextload_i1_to_i32_trunc_cmp_eq_1:
; SI: buffer_load_ubyte [[LOAD:v[0-9]+]]
; SI: v_and_b32_e32 [[RESULT:v[0-9]+]], 1, [[LOAD]]
; SI: buffer_store_byte [[RESULT]]
define amdgpu_kernel void @zextload_i1_to_i32_trunc_cmp_eq_1(i1 addrspace(1)* %out, i1 addrspace(1)* %in) nounwind {
  %bool = load i1, i1 addrspace(1)* %in
  %zext = zext i1 %bool to i32
  %eq = icmp eq i32 %zext, 1
  store i1 %eq, i1 addrspace(1)* %out
  ret void
}
| 58 | |
; sext(i1) == -1 is just the loaded bit itself; no compare should be emitted.
; FUNC-LABEL: {{^}}sextload_i1_to_i32_trunc_cmp_eq_neg1:
; SI: buffer_load_ubyte [[LOAD:v[0-9]+]]
; SI: v_and_b32_e32 [[RESULT:v[0-9]+]], 1, [[LOAD]]
; SI: buffer_store_byte [[RESULT]]
define amdgpu_kernel void @sextload_i1_to_i32_trunc_cmp_eq_neg1(i1 addrspace(1)* %out, i1 addrspace(1)* %in) nounwind {
  %bool = load i1, i1 addrspace(1)* %in
  %sext = sext i1 %bool to i32
  %eq = icmp eq i32 %sext, -1
  store i1 %eq, i1 addrspace(1)* %out
  ret void
}
| 70 | |
; zext(i1) is only ever 0 or 1, so == -1 folds to the constant false.
; FUNC-LABEL: {{^}}zextload_i1_to_i32_trunc_cmp_eq_neg1:
; SI: v_mov_b32_e32 [[RESULT:v[0-9]+]], 0{{$}}
; SI: buffer_store_byte [[RESULT]]
define amdgpu_kernel void @zextload_i1_to_i32_trunc_cmp_eq_neg1(i1 addrspace(1)* %out, i1 addrspace(1)* %in) nounwind {
  %bool = load i1, i1 addrspace(1)* %in
  %zext = zext i1 %bool to i32
  %eq = icmp eq i32 %zext, -1
  store i1 %eq, i1 addrspace(1)* %out
  ret void
}
| 81 | |
| 82 | |
; sext(i1) != 0 is just the loaded bit itself; only load+mask+store remain.
; NOTE: two check bugs fixed here: "FUNC-LABEL" lacked its colon (directive was
; ignored by FileCheck), and the store check used [[RESULT]] which was never
; bound in this function (it silently matched a stale binding from the previous
; function; with the label fixed and -enable-var-scope it would be undefined).
; The masked value is now bound as [[RESULT]], matching the zext sibling test.
; FUNC-LABEL: {{^}}sextload_i1_to_i32_trunc_cmp_ne_0:
; SI: buffer_load_ubyte [[LOAD:v[0-9]+]]
; SI: v_and_b32_e32 [[RESULT:v[0-9]+]], 1, [[LOAD]]
; SI: buffer_store_byte [[RESULT]]
define amdgpu_kernel void @sextload_i1_to_i32_trunc_cmp_ne_0(i1 addrspace(1)* %out, i1 addrspace(1)* %in) nounwind {
  %load = load i1, i1 addrspace(1)* %in
  %ext = sext i1 %load to i32
  %cmp = icmp ne i32 %ext, 0
  store i1 %cmp, i1 addrspace(1)* %out
  ret void
}
| 94 | |
; zext(i1) != 0 is just the loaded bit itself; no compare should be emitted.
; FUNC-LABEL: {{^}}zextload_i1_to_i32_trunc_cmp_ne_0:
; SI: buffer_load_ubyte [[LOAD:v[0-9]+]]
; SI: v_and_b32_e32 [[RESULT:v[0-9]+]], 1, [[LOAD]]
; SI: buffer_store_byte [[RESULT]]
define amdgpu_kernel void @zextload_i1_to_i32_trunc_cmp_ne_0(i1 addrspace(1)* %out, i1 addrspace(1)* %in) nounwind {
  %bool = load i1, i1 addrspace(1)* %in
  %zext = zext i1 %bool to i32
  %ne = icmp ne i32 %zext, 0
  store i1 %ne, i1 addrspace(1)* %out
  ret void
}
| 106 | |
; sext(i1) is only ever 0 or -1, so != 1 folds to the constant true.
; FUNC-LABEL: {{^}}sextload_i1_to_i32_trunc_cmp_ne_1:
; SI: v_mov_b32_e32 [[RESULT:v[0-9]+]], 1{{$}}
; SI: buffer_store_byte [[RESULT]]
define amdgpu_kernel void @sextload_i1_to_i32_trunc_cmp_ne_1(i1 addrspace(1)* %out, i1 addrspace(1)* %in) nounwind {
  %bool = load i1, i1 addrspace(1)* %in
  %sext = sext i1 %bool to i32
  %ne = icmp ne i32 %sext, 1
  store i1 %ne, i1 addrspace(1)* %out
  ret void
}
| 117 | |
; zext(i1) != 1 is the inverted bit, currently emitted as compare-against-1
; followed by a mask negate and select.
; FUNC-LABEL: {{^}}zextload_i1_to_i32_trunc_cmp_ne_1:
; SI: buffer_load_ubyte [[LOAD:v[0-9]+]]
; SI: v_and_b32_e32 [[TMP:v[0-9]+]], 1, [[LOAD]]
; SI: v_cmp_eq_u32_e32 vcc, 1, [[TMP]]{{$}}
; SI-NEXT: s_xor_b64 [[NEG:s\[[0-9]+:[0-9]+\]]], vcc, -1
; SI-NEXT: v_cndmask_b32_e64 [[RESULT:v[0-9]+]], 0, 1, [[NEG]]
; SI: buffer_store_byte [[RESULT]]
define amdgpu_kernel void @zextload_i1_to_i32_trunc_cmp_ne_1(i1 addrspace(1)* %out, i1 addrspace(1)* %in) nounwind {
  %bool = load i1, i1 addrspace(1)* %in
  %zext = zext i1 %bool to i32
  %ne = icmp ne i32 %zext, 1
  store i1 %ne, i1 addrspace(1)* %out
  ret void
}
| 132 | |
; sext(i1) != -1 is the inverted bit; the XSI checks below record the desired
; single-compare codegen and stay disabled until the FIXME is resolved.
; FIXME: This should be one compare.
; FUNC-LABEL: {{^}}sextload_i1_to_i32_trunc_cmp_ne_neg1:
; XSI: buffer_load_ubyte [[LOAD:v[0-9]+]]
; XSI: v_and_b32_e32 [[TMP:v[0-9]+]], 1, [[LOAD]]
; XSI: v_cmp_eq_u32_e64 [[CMP0:s\[[0-9]+:[0-9]+\]]], [[TMP]], 0{{$}}
; XSI-NEXT: v_cndmask_b32_e64 [[RESULT:v[0-9]+]], 0, 1, [[CMP0]]
; XSI-NEXT: buffer_store_byte [[RESULT]]
define amdgpu_kernel void @sextload_i1_to_i32_trunc_cmp_ne_neg1(i1 addrspace(1)* %out, i1 addrspace(1)* %in) nounwind {
  %bool = load i1, i1 addrspace(1)* %in
  %sext = sext i1 %bool to i32
  %ne = icmp ne i32 %sext, -1
  store i1 %ne, i1 addrspace(1)* %out
  ret void
}
| 147 | |
; zext(i1) is only ever 0 or 1, so != -1 folds to the constant true.
; FUNC-LABEL: {{^}}zextload_i1_to_i32_trunc_cmp_ne_neg1:
; SI: v_mov_b32_e32 [[RESULT:v[0-9]+]], 1{{$}}
; SI: buffer_store_byte [[RESULT]]
define amdgpu_kernel void @zextload_i1_to_i32_trunc_cmp_ne_neg1(i1 addrspace(1)* %out, i1 addrspace(1)* %in) nounwind {
  %bool = load i1, i1 addrspace(1)* %in
  %zext = zext i1 %bool to i32
  %ne = icmp ne i32 %zext, -1
  store i1 %ne, i1 addrspace(1)* %out
  ret void
}
| 158 | |
; The and-with-255 is a no-op on an i8, so this is sext(i8) != -1: a real
; compare against -1 must survive. The per-thread gep keeps the load divergent.
; FIXME: Need to handle non-uniform case for function below (load without gep).
; FUNC-LABEL: {{^}}masked_load_i1_to_i32_trunc_cmp_ne_neg1:
; SI: {{buffer|flat}}_load_sbyte [[LOAD:v[0-9]+]]
; SI: v_cmp_ne_u32_e32 vcc, -1, [[LOAD]]{{$}}
; SI-NEXT: v_cndmask_b32_e64
; SI: {{buffer|flat}}_store_byte
define amdgpu_kernel void @masked_load_i1_to_i32_trunc_cmp_ne_neg1(i1 addrspace(1)* %out, i8 addrspace(1)* %in) nounwind {
  %tid = call i32 @llvm.amdgcn.workitem.id.x()
  %gep = getelementptr i8, i8 addrspace(1)* %in, i32 %tid
  %byte = load i8, i8 addrspace(1)* %gep
  %masked = and i8 %byte, 255
  %sext = sext i8 %masked to i32
  %ne = icmp ne i32 %sext, -1
  store i1 %ne, i1 addrspace(1)* %out
  ret void
}