; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse2 | FileCheck %s --check-prefixes=ANY,SSE,SSE2
; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse4.1 | FileCheck %s --check-prefixes=ANY,SSE,SSE41

; There are at least 3 potential patterns corresponding to an unsigned saturated add: min, cmp with sum, cmp with not.
; Test each of those patterns with i8/i16/i32/i64.
; Test each of those with a constant operand and a variable operand.
; Test each of those with a 128-bit vector type.
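;
; For quick reference (comments only, a rough sketch rather than additional test input),
; the three scalar shapes for computing x +sat y can be written with %noty = xor %y, -1
; (i.e. UMAX - %y) as:
;   min:          %s = select (icmp ult %x, %noty), %x, %noty  ->  %r = add %s, %y
;   cmp with sum: %a = add %x, %y  ->  %r = select (icmp ugt %x, %a), -1, %a
;   cmp with not: %a = add %x, %y  ->  %r = select (icmp ugt %x, %noty), -1, %a
; The constant tests below add 42, so the comparison constant -43 is simply ~42.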

define i8 @unsigned_sat_constant_i8_using_min(i8 %x) {
; ANY-LABEL: unsigned_sat_constant_i8_using_min:
; ANY: # %bb.0:
; ANY-NEXT: movl %edi, %eax
; ANY-NEXT: cmpb $-43, %al
; ANY-NEXT: jb .LBB0_2
; ANY-NEXT: # %bb.1:
; ANY-NEXT: movb $-43, %al
; ANY-NEXT: .LBB0_2:
; ANY-NEXT: addb $42, %al
; ANY-NEXT: # kill: def $al killed $al killed $eax
; ANY-NEXT: retq
  %c = icmp ult i8 %x, -43
  %s = select i1 %c, i8 %x, i8 -43
  %r = add i8 %s, 42
  ret i8 %r
}

define i8 @unsigned_sat_constant_i8_using_cmp_sum(i8 %x) {
; ANY-LABEL: unsigned_sat_constant_i8_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: addb $42, %dil
; ANY-NEXT: movb $-1, %al
; ANY-NEXT: jb .LBB1_2
; ANY-NEXT: # %bb.1:
; ANY-NEXT: movl %edi, %eax
; ANY-NEXT: .LBB1_2:
; ANY-NEXT: retq
  %a = add i8 %x, 42
  %c = icmp ugt i8 %x, %a
  %r = select i1 %c, i8 -1, i8 %a
  ret i8 %r
}

define i8 @unsigned_sat_constant_i8_using_cmp_notval(i8 %x) {
; ANY-LABEL: unsigned_sat_constant_i8_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: addb $42, %dil
; ANY-NEXT: movb $-1, %al
; ANY-NEXT: jb .LBB2_2
; ANY-NEXT: # %bb.1:
; ANY-NEXT: movl %edi, %eax
; ANY-NEXT: .LBB2_2:
; ANY-NEXT: retq
  %a = add i8 %x, 42
  %c = icmp ugt i8 %x, -43
  %r = select i1 %c, i8 -1, i8 %a
  ret i8 %r
}

define i16 @unsigned_sat_constant_i16_using_min(i16 %x) {
; ANY-LABEL: unsigned_sat_constant_i16_using_min:
; ANY: # %bb.0:
; ANY-NEXT: cmpw $-43, %di
; ANY-NEXT: movl $65493, %eax # imm = 0xFFD5
; ANY-NEXT: cmovbl %edi, %eax
; ANY-NEXT: addl $42, %eax
; ANY-NEXT: # kill: def $ax killed $ax killed $eax
; ANY-NEXT: retq
  %c = icmp ult i16 %x, -43
  %s = select i1 %c, i16 %x, i16 -43
  %r = add i16 %s, 42
  ret i16 %r
}

define i16 @unsigned_sat_constant_i16_using_cmp_sum(i16 %x) {
; ANY-LABEL: unsigned_sat_constant_i16_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: addw $42, %di
; ANY-NEXT: movl $65535, %eax # imm = 0xFFFF
; ANY-NEXT: cmovael %edi, %eax
; ANY-NEXT: # kill: def $ax killed $ax killed $eax
; ANY-NEXT: retq
  %a = add i16 %x, 42
  %c = icmp ugt i16 %x, %a
  %r = select i1 %c, i16 -1, i16 %a
  ret i16 %r
}

define i16 @unsigned_sat_constant_i16_using_cmp_notval(i16 %x) {
; ANY-LABEL: unsigned_sat_constant_i16_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: addw $42, %di
; ANY-NEXT: movl $65535, %eax # imm = 0xFFFF
; ANY-NEXT: cmovael %edi, %eax
; ANY-NEXT: # kill: def $ax killed $ax killed $eax
; ANY-NEXT: retq
  %a = add i16 %x, 42
  %c = icmp ugt i16 %x, -43
  %r = select i1 %c, i16 -1, i16 %a
  ret i16 %r
}

define i32 @unsigned_sat_constant_i32_using_min(i32 %x) {
; ANY-LABEL: unsigned_sat_constant_i32_using_min:
; ANY: # %bb.0:
; ANY-NEXT: cmpl $-43, %edi
; ANY-NEXT: movl $-43, %eax
; ANY-NEXT: cmovbl %edi, %eax
; ANY-NEXT: addl $42, %eax
; ANY-NEXT: retq
  %c = icmp ult i32 %x, -43
  %s = select i1 %c, i32 %x, i32 -43
  %r = add i32 %s, 42
  ret i32 %r
}

define i32 @unsigned_sat_constant_i32_using_cmp_sum(i32 %x) {
; ANY-LABEL: unsigned_sat_constant_i32_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: addl $42, %edi
; ANY-NEXT: movl $-1, %eax
; ANY-NEXT: cmovael %edi, %eax
; ANY-NEXT: retq
  %a = add i32 %x, 42
  %c = icmp ugt i32 %x, %a
  %r = select i1 %c, i32 -1, i32 %a
  ret i32 %r
}

define i32 @unsigned_sat_constant_i32_using_cmp_notval(i32 %x) {
; ANY-LABEL: unsigned_sat_constant_i32_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: addl $42, %edi
; ANY-NEXT: movl $-1, %eax
; ANY-NEXT: cmovael %edi, %eax
; ANY-NEXT: retq
  %a = add i32 %x, 42
  %c = icmp ugt i32 %x, -43
  %r = select i1 %c, i32 -1, i32 %a
  ret i32 %r
}

define i64 @unsigned_sat_constant_i64_using_min(i64 %x) {
; ANY-LABEL: unsigned_sat_constant_i64_using_min:
; ANY: # %bb.0:
; ANY-NEXT: cmpq $-43, %rdi
; ANY-NEXT: movq $-43, %rax
; ANY-NEXT: cmovbq %rdi, %rax
; ANY-NEXT: addq $42, %rax
; ANY-NEXT: retq
  %c = icmp ult i64 %x, -43
  %s = select i1 %c, i64 %x, i64 -43
  %r = add i64 %s, 42
  ret i64 %r
}

define i64 @unsigned_sat_constant_i64_using_cmp_sum(i64 %x) {
; ANY-LABEL: unsigned_sat_constant_i64_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: addq $42, %rdi
; ANY-NEXT: movq $-1, %rax
; ANY-NEXT: cmovaeq %rdi, %rax
; ANY-NEXT: retq
  %a = add i64 %x, 42
  %c = icmp ugt i64 %x, %a
  %r = select i1 %c, i64 -1, i64 %a
  ret i64 %r
}

define i64 @unsigned_sat_constant_i64_using_cmp_notval(i64 %x) {
; ANY-LABEL: unsigned_sat_constant_i64_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: addq $42, %rdi
; ANY-NEXT: movq $-1, %rax
; ANY-NEXT: cmovaeq %rdi, %rax
; ANY-NEXT: retq
  %a = add i64 %x, 42
  %c = icmp ugt i64 %x, -43
  %r = select i1 %c, i64 -1, i64 %a
  ret i64 %r
}
define i8 @unsigned_sat_variable_i8_using_min(i8 %x, i8 %y) {
; ANY-LABEL: unsigned_sat_variable_i8_using_min:
; ANY: # %bb.0:
; ANY-NEXT: movl %edi, %eax
; ANY-NEXT: movl %esi, %ecx
; ANY-NEXT: notb %cl
; ANY-NEXT: cmpb %cl, %al
; ANY-NEXT: jb .LBB12_2
; ANY-NEXT: # %bb.1:
; ANY-NEXT: movl %ecx, %eax
; ANY-NEXT: .LBB12_2:
; ANY-NEXT: addb %sil, %al
; ANY-NEXT: # kill: def $al killed $al killed $eax
; ANY-NEXT: retq
  %noty = xor i8 %y, -1
  %c = icmp ult i8 %x, %noty
  %s = select i1 %c, i8 %x, i8 %noty
  %r = add i8 %s, %y
  ret i8 %r
}

define i8 @unsigned_sat_variable_i8_using_cmp_sum(i8 %x, i8 %y) {
; ANY-LABEL: unsigned_sat_variable_i8_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: addb %sil, %dil
; ANY-NEXT: movb $-1, %al
; ANY-NEXT: jb .LBB13_2
; ANY-NEXT: # %bb.1:
; ANY-NEXT: movl %edi, %eax
; ANY-NEXT: .LBB13_2:
; ANY-NEXT: retq
  %a = add i8 %x, %y
  %c = icmp ugt i8 %x, %a
  %r = select i1 %c, i8 -1, i8 %a
  ret i8 %r
}

define i8 @unsigned_sat_variable_i8_using_cmp_notval(i8 %x, i8 %y) {
; ANY-LABEL: unsigned_sat_variable_i8_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: movl %esi, %eax
; ANY-NEXT: notb %al
; ANY-NEXT: cmpb %al, %dil
; ANY-NEXT: movb $-1, %al
; ANY-NEXT: ja .LBB14_2
; ANY-NEXT: # %bb.1:
; ANY-NEXT: addb %sil, %dil
; ANY-NEXT: movl %edi, %eax
; ANY-NEXT: .LBB14_2:
; ANY-NEXT: retq
  %noty = xor i8 %y, -1
  %a = add i8 %x, %y
  %c = icmp ugt i8 %x, %noty
  %r = select i1 %c, i8 -1, i8 %a
  ret i8 %r
}

define i16 @unsigned_sat_variable_i16_using_min(i16 %x, i16 %y) {
; ANY-LABEL: unsigned_sat_variable_i16_using_min:
; ANY: # %bb.0:
; ANY-NEXT: # kill: def $esi killed $esi def $rsi
; ANY-NEXT: movl %esi, %eax
; ANY-NEXT: notl %eax
; ANY-NEXT: cmpw %ax, %di
; ANY-NEXT: cmovbl %edi, %eax
; ANY-NEXT: leal (%rax,%rsi), %eax
; ANY-NEXT: # kill: def $ax killed $ax killed $eax
; ANY-NEXT: retq
  %noty = xor i16 %y, -1
  %c = icmp ult i16 %x, %noty
  %s = select i1 %c, i16 %x, i16 %noty
  %r = add i16 %s, %y
  ret i16 %r
}

define i16 @unsigned_sat_variable_i16_using_cmp_sum(i16 %x, i16 %y) {
; ANY-LABEL: unsigned_sat_variable_i16_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: addw %si, %di
; ANY-NEXT: movl $65535, %eax # imm = 0xFFFF
; ANY-NEXT: cmovael %edi, %eax
; ANY-NEXT: # kill: def $ax killed $ax killed $eax
; ANY-NEXT: retq
  %a = add i16 %x, %y
  %c = icmp ugt i16 %x, %a
  %r = select i1 %c, i16 -1, i16 %a
  ret i16 %r
}

define i16 @unsigned_sat_variable_i16_using_cmp_notval(i16 %x, i16 %y) {
; ANY-LABEL: unsigned_sat_variable_i16_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: # kill: def $esi killed $esi def $rsi
; ANY-NEXT: # kill: def $edi killed $edi def $rdi
; ANY-NEXT: leal (%rdi,%rsi), %ecx
; ANY-NEXT: notl %esi
; ANY-NEXT: cmpw %si, %di
; ANY-NEXT: movl $65535, %eax # imm = 0xFFFF
; ANY-NEXT: cmovbel %ecx, %eax
; ANY-NEXT: # kill: def $ax killed $ax killed $eax
; ANY-NEXT: retq
  %noty = xor i16 %y, -1
  %a = add i16 %x, %y
  %c = icmp ugt i16 %x, %noty
  %r = select i1 %c, i16 -1, i16 %a
  ret i16 %r
}

define i32 @unsigned_sat_variable_i32_using_min(i32 %x, i32 %y) {
; ANY-LABEL: unsigned_sat_variable_i32_using_min:
; ANY: # %bb.0:
; ANY-NEXT: # kill: def $esi killed $esi def $rsi
; ANY-NEXT: movl %esi, %eax
; ANY-NEXT: notl %eax
; ANY-NEXT: cmpl %eax, %edi
; ANY-NEXT: cmovbl %edi, %eax
; ANY-NEXT: leal (%rax,%rsi), %eax
; ANY-NEXT: retq
  %noty = xor i32 %y, -1
  %c = icmp ult i32 %x, %noty
  %s = select i1 %c, i32 %x, i32 %noty
  %r = add i32 %s, %y
  ret i32 %r
}

define i32 @unsigned_sat_variable_i32_using_cmp_sum(i32 %x, i32 %y) {
; ANY-LABEL: unsigned_sat_variable_i32_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: addl %esi, %edi
; ANY-NEXT: movl $-1, %eax
; ANY-NEXT: cmovael %edi, %eax
; ANY-NEXT: retq
  %a = add i32 %x, %y
  %c = icmp ugt i32 %x, %a
  %r = select i1 %c, i32 -1, i32 %a
  ret i32 %r
}

define i32 @unsigned_sat_variable_i32_using_cmp_notval(i32 %x, i32 %y) {
; ANY-LABEL: unsigned_sat_variable_i32_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: # kill: def $esi killed $esi def $rsi
; ANY-NEXT: # kill: def $edi killed $edi def $rdi
; ANY-NEXT: leal (%rdi,%rsi), %ecx
; ANY-NEXT: notl %esi
; ANY-NEXT: cmpl %esi, %edi
; ANY-NEXT: movl $-1, %eax
; ANY-NEXT: cmovbel %ecx, %eax
; ANY-NEXT: retq
  %noty = xor i32 %y, -1
  %a = add i32 %x, %y
  %c = icmp ugt i32 %x, %noty
  %r = select i1 %c, i32 -1, i32 %a
  ret i32 %r
}

define i64 @unsigned_sat_variable_i64_using_min(i64 %x, i64 %y) {
; ANY-LABEL: unsigned_sat_variable_i64_using_min:
; ANY: # %bb.0:
; ANY-NEXT: movq %rsi, %rax
; ANY-NEXT: notq %rax
; ANY-NEXT: cmpq %rax, %rdi
; ANY-NEXT: cmovbq %rdi, %rax
; ANY-NEXT: leaq (%rax,%rsi), %rax
; ANY-NEXT: retq
  %noty = xor i64 %y, -1
  %c = icmp ult i64 %x, %noty
  %s = select i1 %c, i64 %x, i64 %noty
  %r = add i64 %s, %y
  ret i64 %r
}

define i64 @unsigned_sat_variable_i64_using_cmp_sum(i64 %x, i64 %y) {
; ANY-LABEL: unsigned_sat_variable_i64_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: addq %rsi, %rdi
; ANY-NEXT: movq $-1, %rax
; ANY-NEXT: cmovaeq %rdi, %rax
; ANY-NEXT: retq
  %a = add i64 %x, %y
  %c = icmp ugt i64 %x, %a
  %r = select i1 %c, i64 -1, i64 %a
  ret i64 %r
}

define i64 @unsigned_sat_variable_i64_using_cmp_notval(i64 %x, i64 %y) {
; ANY-LABEL: unsigned_sat_variable_i64_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: leaq (%rdi,%rsi), %rcx
; ANY-NEXT: notq %rsi
; ANY-NEXT: cmpq %rsi, %rdi
; ANY-NEXT: movq $-1, %rax
; ANY-NEXT: cmovbeq %rcx, %rax
; ANY-NEXT: retq
  %noty = xor i64 %y, -1
  %a = add i64 %x, %y
  %c = icmp ugt i64 %x, %noty
  %r = select i1 %c, i64 -1, i64 %a
  ret i64 %r
}

define <16 x i8> @unsigned_sat_constant_v16i8_using_min(<16 x i8> %x) {
; ANY-LABEL: unsigned_sat_constant_v16i8_using_min:
; ANY: # %bb.0:
; ANY-NEXT: pminub {{.*}}(%rip), %xmm0
; ANY-NEXT: paddb {{.*}}(%rip), %xmm0
; ANY-NEXT: retq
  %c = icmp ult <16 x i8> %x, <i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43>
  %s = select <16 x i1> %c, <16 x i8> %x, <16 x i8> <i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43>
  %r = add <16 x i8> %s, <i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42>
  ret <16 x i8> %r
}

define <16 x i8> @unsigned_sat_constant_v16i8_using_cmp_sum(<16 x i8> %x) {
; ANY-LABEL: unsigned_sat_constant_v16i8_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: paddusb {{.*}}(%rip), %xmm0
; ANY-NEXT: retq
  %a = add <16 x i8> %x, <i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42>
  %c = icmp ugt <16 x i8> %x, %a
  %r = select <16 x i1> %c, <16 x i8> <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>, <16 x i8> %a
  ret <16 x i8> %r
}

define <16 x i8> @unsigned_sat_constant_v16i8_using_cmp_notval(<16 x i8> %x) {
; ANY-LABEL: unsigned_sat_constant_v16i8_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: paddusb {{.*}}(%rip), %xmm0
; ANY-NEXT: retq
  %a = add <16 x i8> %x, <i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42>
  %c = icmp ugt <16 x i8> %x, <i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43>
  %r = select <16 x i1> %c, <16 x i8> <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>, <16 x i8> %a
  ret <16 x i8> %r
}

define <8 x i16> @unsigned_sat_constant_v8i16_using_min(<8 x i16> %x) {
; SSE2-LABEL: unsigned_sat_constant_v8i16_using_min:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [32768,32768,32768,32768,32768,32768,32768,32768]
; SSE2-NEXT: pxor %xmm1, %xmm0
; SSE2-NEXT: pminsw {{.*}}(%rip), %xmm0
; SSE2-NEXT: pxor %xmm1, %xmm0
; SSE2-NEXT: paddw {{.*}}(%rip), %xmm0
; SSE2-NEXT: retq
;
; SSE41-LABEL: unsigned_sat_constant_v8i16_using_min:
; SSE41: # %bb.0:
; SSE41-NEXT: pminuw {{.*}}(%rip), %xmm0
; SSE41-NEXT: paddw {{.*}}(%rip), %xmm0
; SSE41-NEXT: retq
  %c = icmp ult <8 x i16> %x, <i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43>
  %s = select <8 x i1> %c, <8 x i16> %x, <8 x i16> <i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43>
  %r = add <8 x i16> %s, <i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42>
  ret <8 x i16> %r
}

define <8 x i16> @unsigned_sat_constant_v8i16_using_cmp_sum(<8 x i16> %x) {
; ANY-LABEL: unsigned_sat_constant_v8i16_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: paddusw {{.*}}(%rip), %xmm0
; ANY-NEXT: retq
  %a = add <8 x i16> %x, <i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42>
  %c = icmp ugt <8 x i16> %x, %a
  %r = select <8 x i1> %c, <8 x i16> <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>, <8 x i16> %a
  ret <8 x i16> %r
}

define <8 x i16> @unsigned_sat_constant_v8i16_using_cmp_notval(<8 x i16> %x) {
; ANY-LABEL: unsigned_sat_constant_v8i16_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: paddusw {{.*}}(%rip), %xmm0
; ANY-NEXT: retq
  %a = add <8 x i16> %x, <i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42>
  %c = icmp ugt <8 x i16> %x, <i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43>
  %r = select <8 x i1> %c, <8 x i16> <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>, <8 x i16> %a
  ret <8 x i16> %r
}

define <4 x i32> @unsigned_sat_constant_v4i32_using_min(<4 x i32> %x) {
; SSE2-LABEL: unsigned_sat_constant_v4i32_using_min:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: pxor %xmm0, %xmm1
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483605,2147483605,2147483605,2147483605]
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
; SSE2-NEXT: pand %xmm2, %xmm0
; SSE2-NEXT: pandn {{.*}}(%rip), %xmm2
; SSE2-NEXT: por %xmm2, %xmm0
; SSE2-NEXT: paddd {{.*}}(%rip), %xmm0
; SSE2-NEXT: retq
;
; SSE41-LABEL: unsigned_sat_constant_v4i32_using_min:
; SSE41: # %bb.0:
; SSE41-NEXT: pminud {{.*}}(%rip), %xmm0
; SSE41-NEXT: paddd {{.*}}(%rip), %xmm0
; SSE41-NEXT: retq
  %c = icmp ult <4 x i32> %x, <i32 -43, i32 -43, i32 -43, i32 -43>
  %s = select <4 x i1> %c, <4 x i32> %x, <4 x i32> <i32 -43, i32 -43, i32 -43, i32 -43>
  %r = add <4 x i32> %s, <i32 42, i32 42, i32 42, i32 42>
  ret <4 x i32> %r
}

define <4 x i32> @unsigned_sat_constant_v4i32_using_cmp_sum(<4 x i32> %x) {
; SSE2-LABEL: unsigned_sat_constant_v4i32_using_cmp_sum:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [42,42,42,42]
; SSE2-NEXT: paddd %xmm0, %xmm1
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: pxor %xmm2, %xmm0
; SSE2-NEXT: pxor %xmm1, %xmm2
; SSE2-NEXT: pcmpgtd %xmm2, %xmm0
; SSE2-NEXT: por %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE41-LABEL: unsigned_sat_constant_v4i32_using_cmp_sum:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [42,42,42,42]
; SSE41-NEXT: paddd %xmm0, %xmm2
; SSE41-NEXT: movdqa %xmm0, %xmm1
; SSE41-NEXT: pminud %xmm2, %xmm1
; SSE41-NEXT: pcmpeqd %xmm0, %xmm1
; SSE41-NEXT: pcmpeqd %xmm0, %xmm0
; SSE41-NEXT: pxor %xmm0, %xmm1
; SSE41-NEXT: por %xmm2, %xmm1
; SSE41-NEXT: movdqa %xmm1, %xmm0
; SSE41-NEXT: retq
  %a = add <4 x i32> %x, <i32 42, i32 42, i32 42, i32 42>
  %c = icmp ugt <4 x i32> %x, %a
  %r = select <4 x i1> %c, <4 x i32> <i32 -1, i32 -1, i32 -1, i32 -1>, <4 x i32> %a
  ret <4 x i32> %r
}

define <4 x i32> @unsigned_sat_constant_v4i32_using_cmp_notval(<4 x i32> %x) {
; SSE2-LABEL: unsigned_sat_constant_v4i32_using_cmp_notval:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [42,42,42,42]
; SSE2-NEXT: paddd %xmm0, %xmm1
; SSE2-NEXT: pxor {{.*}}(%rip), %xmm0
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
; SSE2-NEXT: por %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE41-LABEL: unsigned_sat_constant_v4i32_using_cmp_notval:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [42,42,42,42]
; SSE41-NEXT: paddd %xmm0, %xmm1
; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [4294967253,4294967253,4294967253,4294967253]
; SSE41-NEXT: pminud %xmm0, %xmm2
; SSE41-NEXT: pcmpeqd %xmm2, %xmm0
; SSE41-NEXT: pcmpeqd %xmm2, %xmm2
; SSE41-NEXT: pxor %xmm2, %xmm0
; SSE41-NEXT: por %xmm1, %xmm0
; SSE41-NEXT: retq
  %a = add <4 x i32> %x, <i32 42, i32 42, i32 42, i32 42>
  %c = icmp ugt <4 x i32> %x, <i32 -43, i32 -43, i32 -43, i32 -43>
  %r = select <4 x i1> %c, <4 x i32> <i32 -1, i32 -1, i32 -1, i32 -1>, <4 x i32> %a
  ret <4 x i32> %r
}

define <2 x i64> @unsigned_sat_constant_v2i64_using_min(<2 x i64> %x) {
; SSE2-LABEL: unsigned_sat_constant_v2i64_using_min:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: pxor %xmm0, %xmm1
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372034707292117,9223372034707292117]
; SSE2-NEXT: movdqa %xmm2, %xmm3
; SSE2-NEXT: pcmpgtd %xmm1, %xmm3
; SSE2-NEXT: pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
; SSE2-NEXT: pcmpeqd %xmm2, %xmm1
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
; SSE2-NEXT: pand %xmm4, %xmm1
; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm3[1,1,3,3]
; SSE2-NEXT: por %xmm1, %xmm2
; SSE2-NEXT: pand %xmm2, %xmm0
; SSE2-NEXT: pandn {{.*}}(%rip), %xmm2
; SSE2-NEXT: por %xmm2, %xmm0
; SSE2-NEXT: paddq {{.*}}(%rip), %xmm0
; SSE2-NEXT: retq
;
; SSE41-LABEL: unsigned_sat_constant_v2i64_using_min:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm1
; SSE41-NEXT: movapd {{.*#+}} xmm2 = [18446744073709551573,18446744073709551573]
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: pxor %xmm1, %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [9223372034707292117,9223372034707292117]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pcmpgtd %xmm0, %xmm4
; SSE41-NEXT: pshufd {{.*#+}} xmm5 = xmm4[0,0,2,2]
; SSE41-NEXT: pcmpeqd %xmm3, %xmm0
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm0[1,1,3,3]
; SSE41-NEXT: pand %xmm5, %xmm3
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm4[1,1,3,3]
; SSE41-NEXT: por %xmm3, %xmm0
; SSE41-NEXT: blendvpd %xmm0, %xmm1, %xmm2
; SSE41-NEXT: paddq {{.*}}(%rip), %xmm2
; SSE41-NEXT: movdqa %xmm2, %xmm0
; SSE41-NEXT: retq
  %c = icmp ult <2 x i64> %x, <i64 -43, i64 -43>
  %s = select <2 x i1> %c, <2 x i64> %x, <2 x i64> <i64 -43, i64 -43>
  %r = add <2 x i64> %s, <i64 42, i64 42>
  ret <2 x i64> %r
}

define <2 x i64> @unsigned_sat_constant_v2i64_using_cmp_sum(<2 x i64> %x) {
; ANY-LABEL: unsigned_sat_constant_v2i64_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: movdqa {{.*#+}} xmm1 = [42,42]
; ANY-NEXT: paddq %xmm0, %xmm1
; ANY-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; ANY-NEXT: pxor %xmm2, %xmm0
; ANY-NEXT: pxor %xmm1, %xmm2
; ANY-NEXT: movdqa %xmm0, %xmm3
; ANY-NEXT: pcmpgtd %xmm2, %xmm3
; ANY-NEXT: pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
; ANY-NEXT: pcmpeqd %xmm0, %xmm2
; ANY-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
; ANY-NEXT: pand %xmm4, %xmm2
; ANY-NEXT: pshufd {{.*#+}} xmm0 = xmm3[1,1,3,3]
; ANY-NEXT: por %xmm1, %xmm0
; ANY-NEXT: por %xmm2, %xmm0
; ANY-NEXT: retq
  %a = add <2 x i64> %x, <i64 42, i64 42>
  %c = icmp ugt <2 x i64> %x, %a
  %r = select <2 x i1> %c, <2 x i64> <i64 -1, i64 -1>, <2 x i64> %a
  ret <2 x i64> %r
}

define <2 x i64> @unsigned_sat_constant_v2i64_using_cmp_notval(<2 x i64> %x) {
; ANY-LABEL: unsigned_sat_constant_v2i64_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: movdqa {{.*#+}} xmm1 = [42,42]
; ANY-NEXT: paddq %xmm0, %xmm1
; ANY-NEXT: pxor {{.*}}(%rip), %xmm0
; ANY-NEXT: movdqa {{.*#+}} xmm2 = [9223372034707292117,9223372034707292117]
; ANY-NEXT: movdqa %xmm0, %xmm3
; ANY-NEXT: pcmpgtd %xmm2, %xmm3
; ANY-NEXT: pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
; ANY-NEXT: pcmpeqd %xmm2, %xmm0
; ANY-NEXT: pshufd {{.*#+}} xmm2 = xmm0[1,1,3,3]
; ANY-NEXT: pand %xmm4, %xmm2
; ANY-NEXT: pshufd {{.*#+}} xmm0 = xmm3[1,1,3,3]
; ANY-NEXT: por %xmm1, %xmm0
; ANY-NEXT: por %xmm2, %xmm0
; ANY-NEXT: retq
  %a = add <2 x i64> %x, <i64 42, i64 42>
  %c = icmp ugt <2 x i64> %x, <i64 -43, i64 -43>
  %r = select <2 x i1> %c, <2 x i64> <i64 -1, i64 -1>, <2 x i64> %a
  ret <2 x i64> %r
}

define <16 x i8> @unsigned_sat_variable_v16i8_using_min(<16 x i8> %x, <16 x i8> %y) {
; ANY-LABEL: unsigned_sat_variable_v16i8_using_min:
; ANY: # %bb.0:
; ANY-NEXT: pcmpeqd %xmm2, %xmm2
; ANY-NEXT: pxor %xmm1, %xmm2
; ANY-NEXT: pminub %xmm2, %xmm0
; ANY-NEXT: paddb %xmm1, %xmm0
; ANY-NEXT: retq
  %noty = xor <16 x i8> %y, <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>
  %c = icmp ult <16 x i8> %x, %noty
  %s = select <16 x i1> %c, <16 x i8> %x, <16 x i8> %noty
  %r = add <16 x i8> %s, %y
  ret <16 x i8> %r
}

define <16 x i8> @unsigned_sat_variable_v16i8_using_cmp_sum(<16 x i8> %x, <16 x i8> %y) {
; ANY-LABEL: unsigned_sat_variable_v16i8_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: paddusb %xmm1, %xmm0
; ANY-NEXT: retq
  %a = add <16 x i8> %x, %y
  %c = icmp ugt <16 x i8> %x, %a
  %r = select <16 x i1> %c, <16 x i8> <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>, <16 x i8> %a
  ret <16 x i8> %r
}

define <16 x i8> @unsigned_sat_variable_v16i8_using_cmp_notval(<16 x i8> %x, <16 x i8> %y) {
; ANY-LABEL: unsigned_sat_variable_v16i8_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: pcmpeqd %xmm2, %xmm2
; ANY-NEXT: movdqa %xmm0, %xmm3
; ANY-NEXT: paddb %xmm1, %xmm3
; ANY-NEXT: pxor %xmm2, %xmm1
; ANY-NEXT: pminub %xmm0, %xmm1
; ANY-NEXT: pcmpeqb %xmm1, %xmm0
; ANY-NEXT: pxor %xmm2, %xmm0
; ANY-NEXT: por %xmm3, %xmm0
; ANY-NEXT: retq
  %noty = xor <16 x i8> %y, <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>
  %a = add <16 x i8> %x, %y
  %c = icmp ugt <16 x i8> %x, %noty
  %r = select <16 x i1> %c, <16 x i8> <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>, <16 x i8> %a
  ret <16 x i8> %r
}

define <8 x i16> @unsigned_sat_variable_v8i16_using_min(<8 x i16> %x, <8 x i16> %y) {
; SSE2-LABEL: unsigned_sat_variable_v8i16_using_min:
; SSE2: # %bb.0:
; SSE2-NEXT: pcmpeqd %xmm2, %xmm2
; SSE2-NEXT: movdqa {{.*#+}} xmm3 = [32768,32768,32768,32768,32768,32768,32768,32768]
; SSE2-NEXT: pxor %xmm3, %xmm0
; SSE2-NEXT: pxor %xmm3, %xmm2
; SSE2-NEXT: pxor %xmm1, %xmm2
; SSE2-NEXT: pminsw %xmm2, %xmm0
; SSE2-NEXT: pxor %xmm3, %xmm0
; SSE2-NEXT: paddw %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE41-LABEL: unsigned_sat_variable_v8i16_using_min:
; SSE41: # %bb.0:
; SSE41-NEXT: pcmpeqd %xmm2, %xmm2
; SSE41-NEXT: pxor %xmm1, %xmm2
; SSE41-NEXT: pminuw %xmm2, %xmm0
; SSE41-NEXT: paddw %xmm1, %xmm0
; SSE41-NEXT: retq
  %noty = xor <8 x i16> %y, <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>
  %c = icmp ult <8 x i16> %x, %noty
  %s = select <8 x i1> %c, <8 x i16> %x, <8 x i16> %noty
  %r = add <8 x i16> %s, %y
  ret <8 x i16> %r
}

define <8 x i16> @unsigned_sat_variable_v8i16_using_cmp_sum(<8 x i16> %x, <8 x i16> %y) {
; ANY-LABEL: unsigned_sat_variable_v8i16_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: paddusw %xmm1, %xmm0
; ANY-NEXT: retq
  %a = add <8 x i16> %x, %y
  %c = icmp ugt <8 x i16> %x, %a
  %r = select <8 x i1> %c, <8 x i16> <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>, <8 x i16> %a
  ret <8 x i16> %r
}

define <8 x i16> @unsigned_sat_variable_v8i16_using_cmp_notval(<8 x i16> %x, <8 x i16> %y) {
; SSE2-LABEL: unsigned_sat_variable_v8i16_using_cmp_notval:
; SSE2: # %bb.0:
; SSE2-NEXT: pcmpeqd %xmm2, %xmm2
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: paddw %xmm1, %xmm3
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [32768,32768,32768,32768,32768,32768,32768,32768]
; SSE2-NEXT: pxor %xmm4, %xmm0
; SSE2-NEXT: pxor %xmm4, %xmm2
; SSE2-NEXT: pxor %xmm1, %xmm2
; SSE2-NEXT: pcmpgtw %xmm2, %xmm0
; SSE2-NEXT: por %xmm3, %xmm0
; SSE2-NEXT: retq
;
; SSE41-LABEL: unsigned_sat_variable_v8i16_using_cmp_notval:
; SSE41: # %bb.0:
; SSE41-NEXT: pcmpeqd %xmm2, %xmm2
; SSE41-NEXT: movdqa %xmm0, %xmm3
; SSE41-NEXT: paddw %xmm1, %xmm3
; SSE41-NEXT: pxor %xmm2, %xmm1
; SSE41-NEXT: pminuw %xmm0, %xmm1
; SSE41-NEXT: pcmpeqw %xmm1, %xmm0
; SSE41-NEXT: pxor %xmm2, %xmm0
; SSE41-NEXT: por %xmm3, %xmm0
; SSE41-NEXT: retq
  %noty = xor <8 x i16> %y, <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>
  %a = add <8 x i16> %x, %y
  %c = icmp ugt <8 x i16> %x, %noty
  %r = select <8 x i1> %c, <8 x i16> <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>, <8 x i16> %a
  ret <8 x i16> %r
}

define <4 x i32> @unsigned_sat_variable_v4i32_using_min(<4 x i32> %x, <4 x i32> %y) {
; SSE2-LABEL: unsigned_sat_variable_v4i32_using_min:
; SSE2: # %bb.0:
; SSE2-NEXT: pcmpeqd %xmm2, %xmm2
; SSE2-NEXT: movdqa {{.*#+}} xmm3 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm4
; SSE2-NEXT: pxor %xmm3, %xmm4
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm1, %xmm3
; SSE2-NEXT: pcmpgtd %xmm4, %xmm3
; SSE2-NEXT: pand %xmm3, %xmm0
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: movdqa %xmm1, %xmm2
; SSE2-NEXT: pandn %xmm3, %xmm2
; SSE2-NEXT: por %xmm2, %xmm0
; SSE2-NEXT: paddd %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE41-LABEL: unsigned_sat_variable_v4i32_using_min:
; SSE41: # %bb.0:
; SSE41-NEXT: pcmpeqd %xmm2, %xmm2
; SSE41-NEXT: pxor %xmm1, %xmm2
; SSE41-NEXT: pminud %xmm2, %xmm0
; SSE41-NEXT: paddd %xmm1, %xmm0
; SSE41-NEXT: retq
  %noty = xor <4 x i32> %y, <i32 -1, i32 -1, i32 -1, i32 -1>
  %c = icmp ult <4 x i32> %x, %noty
  %s = select <4 x i1> %c, <4 x i32> %x, <4 x i32> %noty
  %r = add <4 x i32> %s, %y
  ret <4 x i32> %r
}

define <4 x i32> @unsigned_sat_variable_v4i32_using_cmp_sum(<4 x i32> %x, <4 x i32> %y) {
; SSE2-LABEL: unsigned_sat_variable_v4i32_using_cmp_sum:
; SSE2: # %bb.0:
; SSE2-NEXT: paddd %xmm0, %xmm1
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: pxor %xmm2, %xmm0
; SSE2-NEXT: pxor %xmm1, %xmm2
; SSE2-NEXT: pcmpgtd %xmm2, %xmm0
; SSE2-NEXT: por %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE41-LABEL: unsigned_sat_variable_v4i32_using_cmp_sum:
; SSE41: # %bb.0:
; SSE41-NEXT: paddd %xmm0, %xmm1
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pminud %xmm1, %xmm2
; SSE41-NEXT: pcmpeqd %xmm0, %xmm2
; SSE41-NEXT: pcmpeqd %xmm0, %xmm0
; SSE41-NEXT: pxor %xmm0, %xmm2
; SSE41-NEXT: por %xmm1, %xmm2
; SSE41-NEXT: movdqa %xmm2, %xmm0
; SSE41-NEXT: retq
  %a = add <4 x i32> %x, %y
  %c = icmp ugt <4 x i32> %x, %a
  %r = select <4 x i1> %c, <4 x i32> <i32 -1, i32 -1, i32 -1, i32 -1>, <4 x i32> %a
  ret <4 x i32> %r
}

define <4 x i32> @unsigned_sat_variable_v4i32_using_cmp_notval(<4 x i32> %x, <4 x i32> %y) {
; SSE2-LABEL: unsigned_sat_variable_v4i32_using_cmp_notval:
; SSE2: # %bb.0:
; SSE2-NEXT: pcmpeqd %xmm2, %xmm2
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: paddd %xmm1, %xmm3
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: pxor %xmm4, %xmm0
; SSE2-NEXT: pxor %xmm4, %xmm2
; SSE2-NEXT: pxor %xmm1, %xmm2
; SSE2-NEXT: pcmpgtd %xmm2, %xmm0
; SSE2-NEXT: por %xmm3, %xmm0
; SSE2-NEXT: retq
;
; SSE41-LABEL: unsigned_sat_variable_v4i32_using_cmp_notval:
; SSE41: # %bb.0:
; SSE41-NEXT: pcmpeqd %xmm2, %xmm2
; SSE41-NEXT: movdqa %xmm0, %xmm3
; SSE41-NEXT: paddd %xmm1, %xmm3
; SSE41-NEXT: pxor %xmm2, %xmm1
; SSE41-NEXT: pminud %xmm0, %xmm1
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
; SSE41-NEXT: pxor %xmm2, %xmm0
; SSE41-NEXT: por %xmm3, %xmm0
; SSE41-NEXT: retq
  %noty = xor <4 x i32> %y, <i32 -1, i32 -1, i32 -1, i32 -1>
  %a = add <4 x i32> %x, %y
  %c = icmp ugt <4 x i32> %x, %noty
  %r = select <4 x i1> %c, <4 x i32> <i32 -1, i32 -1, i32 -1, i32 -1>, <4 x i32> %a
  ret <4 x i32> %r
}

define <2 x i64> @unsigned_sat_variable_v2i64_using_min(<2 x i64> %x, <2 x i64> %y) {
; SSE2-LABEL: unsigned_sat_variable_v2i64_using_min:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: pcmpeqd %xmm3, %xmm3
; SSE2-NEXT: movdqa %xmm0, %xmm4
; SSE2-NEXT: pxor %xmm2, %xmm4
; SSE2-NEXT: pxor %xmm3, %xmm2
; SSE2-NEXT: pxor %xmm1, %xmm2
; SSE2-NEXT: movdqa %xmm2, %xmm5
; SSE2-NEXT: pcmpgtd %xmm4, %xmm5
; SSE2-NEXT: pshufd {{.*#+}} xmm6 = xmm5[0,0,2,2]
; SSE2-NEXT: pcmpeqd %xmm4, %xmm2
; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
; SSE2-NEXT: pand %xmm6, %xmm2
; SSE2-NEXT: pshufd {{.*#+}} xmm4 = xmm5[1,1,3,3]
; SSE2-NEXT: por %xmm2, %xmm4
; SSE2-NEXT: pand %xmm4, %xmm0
; SSE2-NEXT: pxor %xmm3, %xmm4
; SSE2-NEXT: movdqa %xmm1, %xmm2
; SSE2-NEXT: pandn %xmm4, %xmm2
; SSE2-NEXT: por %xmm2, %xmm0
; SSE2-NEXT: paddq %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE41-LABEL: unsigned_sat_variable_v2i64_using_min:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pcmpeqd %xmm3, %xmm3
; SSE41-NEXT: pxor %xmm1, %xmm3
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa %xmm2, %xmm4
; SSE41-NEXT: pxor %xmm0, %xmm4
; SSE41-NEXT: pxor %xmm3, %xmm0
; SSE41-NEXT: movdqa %xmm0, %xmm5
; SSE41-NEXT: pcmpgtd %xmm4, %xmm5
; SSE41-NEXT: pshufd {{.*#+}} xmm6 = xmm5[0,0,2,2]
; SSE41-NEXT: pcmpeqd %xmm4, %xmm0
; SSE41-NEXT: pshufd {{.*#+}} xmm4 = xmm0[1,1,3,3]
; SSE41-NEXT: pand %xmm6, %xmm4
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm5[1,1,3,3]
; SSE41-NEXT: por %xmm4, %xmm0
; SSE41-NEXT: blendvpd %xmm0, %xmm2, %xmm3
; SSE41-NEXT: paddq %xmm1, %xmm3
; SSE41-NEXT: movdqa %xmm3, %xmm0
; SSE41-NEXT: retq
  %noty = xor <2 x i64> %y, <i64 -1, i64 -1>
  %c = icmp ult <2 x i64> %x, %noty
  %s = select <2 x i1> %c, <2 x i64> %x, <2 x i64> %noty
  %r = add <2 x i64> %s, %y
  ret <2 x i64> %r
}

define <2 x i64> @unsigned_sat_variable_v2i64_using_cmp_sum(<2 x i64> %x, <2 x i64> %y) {
; ANY-LABEL: unsigned_sat_variable_v2i64_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: paddq %xmm0, %xmm1
; ANY-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; ANY-NEXT: pxor %xmm2, %xmm0
; ANY-NEXT: pxor %xmm1, %xmm2
; ANY-NEXT: movdqa %xmm0, %xmm3
; ANY-NEXT: pcmpgtd %xmm2, %xmm3
; ANY-NEXT: pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
; ANY-NEXT: pcmpeqd %xmm0, %xmm2
; ANY-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
; ANY-NEXT: pand %xmm4, %xmm2
; ANY-NEXT: pshufd {{.*#+}} xmm0 = xmm3[1,1,3,3]
; ANY-NEXT: por %xmm1, %xmm0
; ANY-NEXT: por %xmm2, %xmm0
; ANY-NEXT: retq
  %a = add <2 x i64> %x, %y
  %c = icmp ugt <2 x i64> %x, %a
  %r = select <2 x i1> %c, <2 x i64> <i64 -1, i64 -1>, <2 x i64> %a
  ret <2 x i64> %r
}

define <2 x i64> @unsigned_sat_variable_v2i64_using_cmp_notval(<2 x i64> %x, <2 x i64> %y) {
; ANY-LABEL: unsigned_sat_variable_v2i64_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; ANY-NEXT: pcmpeqd %xmm3, %xmm3
; ANY-NEXT: movdqa %xmm0, %xmm4
; ANY-NEXT: paddq %xmm1, %xmm4
; ANY-NEXT: pxor %xmm2, %xmm0
; ANY-NEXT: pxor %xmm2, %xmm3
; ANY-NEXT: pxor %xmm1, %xmm3
; ANY-NEXT: movdqa %xmm0, %xmm1
; ANY-NEXT: pcmpgtd %xmm3, %xmm1
; ANY-NEXT: pshufd {{.*#+}} xmm2 = xmm1[0,0,2,2]
; ANY-NEXT: pcmpeqd %xmm0, %xmm3
; ANY-NEXT: pshufd {{.*#+}} xmm3 = xmm3[1,1,3,3]
; ANY-NEXT: pand %xmm2, %xmm3
; ANY-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,3,3]
; ANY-NEXT: por %xmm4, %xmm0
; ANY-NEXT: por %xmm3, %xmm0
; ANY-NEXT: retq
  %noty = xor <2 x i64> %y, <i64 -1, i64 -1>
  %a = add <2 x i64> %x, %y
  %c = icmp ugt <2 x i64> %x, %noty
  %r = select <2 x i1> %c, <2 x i64> <i64 -1, i64 -1>, <2 x i64> %a
  ret <2 x i64> %r
}