Asaf Badouh | 5a3a023 | 2016-02-01 15:48:21 +0000 | [diff] [blame] | 1 | ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
|
| 2 | ; RUN: llc < %s -mtriple=x86_64-apple-darwin -mcpu=skx -mattr=+avx512vbmi --show-mc-encoding| FileCheck %s
|
| 3 | declare <64 x i8> @llvm.x86.avx512.mask.permvar.qi.512(<64 x i8>, <64 x i8>, <64 x i8>, i64)
|
| 4 |
|
; VPERMB (byte permute) via the masked intrinsic: one call with a live mask,
; one with a zeroinitializer pass-through, one with an all-ones (no-op) mask,
; then sum all three so none can be dead-code eliminated.
define <64 x i8>@test_int_x86_avx512_mask_permvar_qi_512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 %x3) {
; CHECK-LABEL: test_int_x86_avx512_mask_permvar_qi_512:
; CHECK:       ## BB#0:
; CHECK-NEXT:    kmovq %rdi, %k1
; CHECK-NEXT:    vpermb %zmm1, %zmm0, %zmm2 {%k1}
; CHECK-NEXT:    vpermb %zmm1, %zmm0, %zmm3 {%k1} {z}
; CHECK-NEXT:    vpermb %zmm1, %zmm0, %zmm0
; CHECK-NEXT:    vpaddb %zmm3, %zmm2, %zmm1
; CHECK-NEXT:    vpaddb %zmm0, %zmm1, %zmm0
; CHECK-NEXT:    retq
  %masked = call <64 x i8> @llvm.x86.avx512.mask.permvar.qi.512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 %x3)
  %zeropass = call <64 x i8> @llvm.x86.avx512.mask.permvar.qi.512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> zeroinitializer, i64 %x3)
  %nomask = call <64 x i8> @llvm.x86.avx512.mask.permvar.qi.512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 -1)
  %sum0 = add <64 x i8> %masked, %zeropass
  %sum1 = add <64 x i8> %sum0, %nomask
  ret <64 x i8> %sum1
}
|
Michael Zuckerman | 65c40af | 2016-01-20 15:24:56 +0000 | [diff] [blame] | 22 | |
Asaf Badouh | 5a3a023 | 2016-02-01 15:48:21 +0000 | [diff] [blame] | 23 | declare <64 x i8> @llvm.x86.avx512.mask.pmultishift.qb.512(<64 x i8>, <64 x i8>, <64 x i8>, i64) |
| 24 | |
; VPMULTISHIFTQB via the masked intrinsic: masked, zeroing-masked and
; unmasked variants, summed so all three calls stay live.
; NOTE(review): checks tightened to the autogenerated CHECK-NEXT style used
; by every other test in this file (was loose hand-written ; CHECK: lines
; that did not pin the kmovq mask load, block label, or retq).
define <64 x i8>@test_int_x86_avx512_mask_pmultishift_qb_512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 %x3) {
; CHECK-LABEL: test_int_x86_avx512_mask_pmultishift_qb_512:
; CHECK:       ## BB#0:
; CHECK-NEXT:    kmovq %rdi, %k1
; CHECK-NEXT:    vpmultishiftqb %zmm1, %zmm0, %zmm2 {%k1}
; CHECK-NEXT:    vpmultishiftqb %zmm1, %zmm0, %zmm3 {%k1} {z}
; CHECK-NEXT:    vpmultishiftqb %zmm1, %zmm0, %zmm0
; CHECK-NEXT:    vpaddb %zmm3, %zmm2, %zmm1
; CHECK-NEXT:    vpaddb %zmm0, %zmm1, %zmm0
; CHECK-NEXT:    retq
  %res = call <64 x i8> @llvm.x86.avx512.mask.pmultishift.qb.512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 %x3)
  %res1 = call <64 x i8> @llvm.x86.avx512.mask.pmultishift.qb.512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> zeroinitializer, i64 %x3)
  %res2 = call <64 x i8> @llvm.x86.avx512.mask.pmultishift.qb.512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 -1)
  %res3 = add <64 x i8> %res, %res1
  %res4 = add <64 x i8> %res3, %res2
  ret <64 x i8> %res4
}
Michael Zuckerman | 21a30a4 | 2016-01-21 13:36:01 +0000 | [diff] [blame] | 39 | |
| 40 | declare <64 x i8> @llvm.x86.avx512.mask.vpermi2var.qi.512(<64 x i8>, <64 x i8>, <64 x i8>, i64) |
| 41 | |
; Two-source byte permute (vpermi2var intrinsic): masked call, a call with
; %x1 replaced by zeroinitializer, and an all-ones-mask call, summed so all
; three survive DCE.
define <64 x i8>@test_int_x86_avx512_mask_vpermi2var_qi_512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 %x3) {
; CHECK-LABEL: test_int_x86_avx512_mask_vpermi2var_qi_512:
; CHECK:       ## BB#0:
; CHECK-NEXT:    kmovq %rdi, %k1
; CHECK-NEXT:    vmovaps %zmm1, %zmm3
; CHECK-NEXT:    vpermt2b %zmm2, %zmm0, %zmm3 {%k1}
; CHECK-NEXT:    vpermt2b %zmm2, %zmm0, %zmm1
; CHECK-NEXT:    vpxord %zmm4, %zmm4, %zmm4
; CHECK-NEXT:    vpermt2b %zmm2, %zmm0, %zmm4 {%k1} {z}
; CHECK-NEXT:    vpaddb %zmm4, %zmm3, %zmm0
; CHECK-NEXT:    vpaddb %zmm1, %zmm0, %zmm0
; CHECK-NEXT:    retq
  %masked = call <64 x i8> @llvm.x86.avx512.mask.vpermi2var.qi.512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 %x3)
  %zeroarg = call <64 x i8> @llvm.x86.avx512.mask.vpermi2var.qi.512(<64 x i8> %x0, <64 x i8> zeroinitializer, <64 x i8> %x2, i64 %x3)
  %nomask = call <64 x i8> @llvm.x86.avx512.mask.vpermi2var.qi.512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 -1)
  %sum0 = add <64 x i8> %masked, %zeroarg
  %sum1 = add <64 x i8> %sum0, %nomask
  ret <64 x i8> %sum1
}
| 61 | |
| 62 | declare <64 x i8> @llvm.x86.avx512.mask.vpermt2var.qi.512(<64 x i8>, <64 x i8>, <64 x i8>, i64) |
| 63 | |
; Two-source byte permute (vpermt2var intrinsic), same three-variant pattern
; as the vpermi2var test above: masked, zeroinitializer-%x1, all-ones mask.
define <64 x i8>@test_int_x86_avx512_mask_vpermt2var_qi_512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 %x3) {
; CHECK-LABEL: test_int_x86_avx512_mask_vpermt2var_qi_512:
; CHECK:       ## BB#0:
; CHECK-NEXT:    kmovq %rdi, %k1
; CHECK-NEXT:    vmovaps %zmm1, %zmm3
; CHECK-NEXT:    vpermt2b %zmm2, %zmm0, %zmm3 {%k1}
; CHECK-NEXT:    vpermt2b %zmm2, %zmm0, %zmm1
; CHECK-NEXT:    vpxord %zmm4, %zmm4, %zmm4
; CHECK-NEXT:    vpermt2b %zmm2, %zmm0, %zmm4 {%k1} {z}
; CHECK-NEXT:    vpaddb %zmm4, %zmm3, %zmm0
; CHECK-NEXT:    vpaddb %zmm1, %zmm0, %zmm0
; CHECK-NEXT:    retq
  %masked = call <64 x i8> @llvm.x86.avx512.mask.vpermt2var.qi.512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 %x3)
  %zeroarg = call <64 x i8> @llvm.x86.avx512.mask.vpermt2var.qi.512(<64 x i8> %x0, <64 x i8> zeroinitializer, <64 x i8> %x2, i64 %x3)
  %nomask = call <64 x i8> @llvm.x86.avx512.mask.vpermt2var.qi.512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 -1)
  %sum0 = add <64 x i8> %masked, %zeroarg
  %sum1 = add <64 x i8> %sum0, %nomask
  ret <64 x i8> %sum1
}
| 83 | |
| 84 | declare <64 x i8> @llvm.x86.avx512.maskz.vpermt2var.qi.512(<64 x i8>, <64 x i8>, <64 x i8>, i64) |
| 85 | |
; Zero-masking (maskz) form of the two-source byte permute: a single call,
; checked for the {%k1} {z} encoding and the move of the result into zmm0.
define <64 x i8>@test_int_x86_avx512_maskz_vpermt2var_qi_512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 %x3) {
; CHECK-LABEL: test_int_x86_avx512_maskz_vpermt2var_qi_512:
; CHECK:       ## BB#0:
; CHECK-NEXT:    kmovq %rdi, %k1
; CHECK-NEXT:    vpermt2b %zmm2, %zmm0, %zmm1 {%k1} {z}
; CHECK-NEXT:    vmovaps %zmm1, %zmm0
; CHECK-NEXT:    retq
  %r = call <64 x i8> @llvm.x86.avx512.maskz.vpermt2var.qi.512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 %x3)
  ret <64 x i8> %r
}