; RUN: llc < %s -mcpu=x86-64 -mattr=+avx -x86-experimental-vector-shuffle-lowering | FileCheck %s --check-prefix=ALL --check-prefix=AVX --check-prefix=AVX1
; RUN: llc < %s -mcpu=x86-64 -mattr=+avx2 -x86-experimental-vector-shuffle-lowering | FileCheck %s --check-prefix=ALL --check-prefix=AVX --check-prefix=AVX2

target triple = "x86_64-unknown-unknown"

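; Shuffle lowering tests for <32 x i8> broadcast-style masks (lane-zero splats
; with a single displaced element, plus per-half splats), checked separately
; for the AVX1 and AVX2 paths of the experimental vector shuffle lowering.
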
define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT:    vpshufb %xmm1, %xmm0, %xmm0
; AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_01_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_01_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_01_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX2-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0]
; AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_02_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_02_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_02_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX2-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0]
; AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 2, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_03_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_03_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_03_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX2-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0]
; AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 3, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_04_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_04_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_04_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX2-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0]
; AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 4, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_05_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_05_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_05_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX2-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0]
; AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 5, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_06_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_06_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,6,0,0,0,0,0,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_06_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX2-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,6,0,0,0,0,0,0]
; AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 6, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_07_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_07_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,7,0,0,0,0,0,0,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_07_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX2-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,7,0,0,0,0,0,0,0]
; AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 7, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_08_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_08_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,8,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_08_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX2-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,8,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 8, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_09_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_09_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,9,0,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_09_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX2-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,9,0,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 9, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_10_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_10_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,10,0,0,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_10_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX2-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,10,0,0,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 10, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_11_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_11_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_11_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX2-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 11, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_12_00_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_12_00_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,12,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_12_00_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX2-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,12,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 12, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_13_00_00_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_13_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,13,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_13_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX2-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,13,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 13, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_14_00_00_00_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_14_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_14_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX2-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 14, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_15_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_15_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    movl $15, %eax
; AVX1-NEXT:    vmovd %eax, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vpxor %xmm2, %xmm2, %xmm2
; AVX1-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_15_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    movl $15, %eax
; AVX2-NEXT:    vmovd %eax, %xmm1
; AVX2-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX2-NEXT:    vpxor %xmm2, %xmm2, %xmm2
; AVX2-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
; AVX2-NEXT:    vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 15, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_16_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_16_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
; AVX1-NEXT:    vpshufb {{.*}} # xmm2 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm2[0]
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],zero
; AVX1-NEXT:    vpor %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_16_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %ymm1, %ymm1, %ymm1
; AVX2-NEXT:    vpshufb %ymm1, %ymm0, %ymm2
; AVX2-NEXT:    vperm2i128 {{.*}} # ymm0 = ymm0[2,3,0,1]
; AVX2-NEXT:    vpshufb %ymm1, %ymm0, %ymm0
; AVX2-NEXT:    vmovdqa {{.*}} # ymm1 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vpblendvb %ymm1, %ymm0, %ymm2, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 16, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_17_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_17_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
; AVX1-NEXT:    vpshufb {{.*}} # xmm2 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm2[1],zero
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0,0],zero,xmm0[0]
; AVX1-NEXT:    vpor %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_17_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %ymm1, %ymm1, %ymm1
; AVX2-NEXT:    vpshufb %ymm1, %ymm0, %ymm1
; AVX2-NEXT:    vperm2i128 {{.*}} # ymm0 = ymm0[2,3,0,1]
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,1,u,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16]
; AVX2-NEXT:    vmovdqa {{.*}} # ymm2 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,0,128,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 17, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_18_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_18_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
; AVX1-NEXT:    vpshufb {{.*}} # xmm2 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm2[2],zero,zero
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0],zero,xmm0[0,0]
; AVX1-NEXT:    vpor %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_18_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %ymm1, %ymm1, %ymm1
; AVX2-NEXT:    vpshufb %ymm1, %ymm0, %ymm1
; AVX2-NEXT:    vperm2i128 {{.*}} # ymm0 = ymm0[2,3,0,1]
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,2,u,u,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16]
; AVX2-NEXT:    vmovdqa {{.*}} # ymm2 = [128,128,128,128,128,128,128,128,128,128,128,128,128,0,128,128,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 18, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_19_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_19_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
; AVX1-NEXT:    vpshufb {{.*}} # xmm2 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm2[3],zero,zero,zero
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0],zero,xmm0[0,0,0]
; AVX1-NEXT:    vpor %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_19_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %ymm1, %ymm1, %ymm1
; AVX2-NEXT:    vpshufb %ymm1, %ymm0, %ymm1
; AVX2-NEXT:    vperm2i128 {{.*}} # ymm0 = ymm0[2,3,0,1]
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,3,u,u,u,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16]
; AVX2-NEXT:    vmovdqa {{.*}} # ymm2 = [128,128,128,128,128,128,128,128,128,128,128,128,0,128,128,128,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 19, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_20_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_20_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
; AVX1-NEXT:    vpshufb {{.*}} # xmm2 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm2[4],zero,zero,zero,zero
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0],zero,xmm0[0,0,0,0]
; AVX1-NEXT:    vpor %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_20_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %ymm1, %ymm1, %ymm1
; AVX2-NEXT:    vpshufb %ymm1, %ymm0, %ymm1
; AVX2-NEXT:    vperm2i128 {{.*}} # ymm0 = ymm0[2,3,0,1]
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,4,u,u,u,u,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16]
; AVX2-NEXT:    vmovdqa {{.*}} # ymm2 = [128,128,128,128,128,128,128,128,128,128,128,0,128,128,128,128,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 20, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_21_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_21_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
; AVX1-NEXT:    vpshufb {{.*}} # xmm2 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm2[5],zero,zero,zero,zero,zero
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0],zero,xmm0[0,0,0,0,0]
; AVX1-NEXT:    vpor %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_21_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %ymm1, %ymm1, %ymm1
; AVX2-NEXT:    vpshufb %ymm1, %ymm0, %ymm1
; AVX2-NEXT:    vperm2i128 {{.*}} # ymm0 = ymm0[2,3,0,1]
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,5,u,u,u,u,u,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16]
; AVX2-NEXT:    vmovdqa {{.*}} # ymm2 = [128,128,128,128,128,128,128,128,128,128,0,128,128,128,128,128,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 21, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_22_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_22_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
; AVX1-NEXT:    vpshufb {{.*}} # xmm2 = zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm2[6],zero,zero,zero,zero,zero,zero
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0],zero,xmm0[0,0,0,0,0,0]
; AVX1-NEXT:    vpor %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_22_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %ymm1, %ymm1, %ymm1
; AVX2-NEXT:    vpshufb %ymm1, %ymm0, %ymm1
; AVX2-NEXT:    vperm2i128 {{.*}} # ymm0 = ymm0[2,3,0,1]
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[u,u,u,u,u,u,u,u,u,6,u,u,u,u,u,u,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16]
; AVX2-NEXT:    vmovdqa {{.*}} # ymm2 = [128,128,128,128,128,128,128,128,128,0,128,128,128,128,128,128,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 22, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_23_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_23_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
; AVX1-NEXT:    vpshufb {{.*}} # xmm2 = zero,zero,zero,zero,zero,zero,zero,zero,xmm2[7],zero,zero,zero,zero,zero,zero,zero
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0],zero,xmm0[0,0,0,0,0,0,0]
; AVX1-NEXT:    vpor %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_23_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %ymm1, %ymm1, %ymm1
; AVX2-NEXT:    vpshufb %ymm1, %ymm0, %ymm1
; AVX2-NEXT:    vperm2i128 {{.*}} # ymm0 = ymm0[2,3,0,1]
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[u,u,u,u,u,u,u,u,7,u,u,u,u,u,u,u,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16]
; AVX2-NEXT:    vmovdqa {{.*}} # ymm2 = [128,128,128,128,128,128,128,128,0,128,128,128,128,128,128,128,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 23, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_24_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_24_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
; AVX1-NEXT:    vpshufb {{.*}} # xmm2 = zero,zero,zero,zero,zero,zero,zero,xmm2[8],zero,zero,zero,zero,zero,zero,zero,zero
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0],zero,xmm0[0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vpor %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_24_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %ymm1, %ymm1, %ymm1
; AVX2-NEXT:    vpshufb %ymm1, %ymm0, %ymm1
; AVX2-NEXT:    vperm2i128 {{.*}} # ymm0 = ymm0[2,3,0,1]
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[u,u,u,u,u,u,u,8,u,u,u,u,u,u,u,u,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16]
; AVX2-NEXT:    vmovdqa {{.*}} # ymm2 = [128,128,128,128,128,128,128,0,128,128,128,128,128,128,128,128,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 24, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_25_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_25_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
; AVX1-NEXT:    vpshufb {{.*}} # xmm2 = zero,zero,zero,zero,zero,zero,xmm2[9],zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0],zero,xmm0[0,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vpor %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_25_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %ymm1, %ymm1, %ymm1
; AVX2-NEXT:    vpshufb %ymm1, %ymm0, %ymm1
; AVX2-NEXT:    vperm2i128 {{.*}} # ymm0 = ymm0[2,3,0,1]
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[u,u,u,u,u,u,9,u,u,u,u,u,u,u,u,u,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16]
; AVX2-NEXT:    vmovdqa {{.*}} # ymm2 = [128,128,128,128,128,128,0,128,128,128,128,128,128,128,128,128,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 25, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_26_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_26_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
; AVX1-NEXT:    vpshufb {{.*}} # xmm2 = zero,zero,zero,zero,zero,xmm2[10],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0],zero,xmm0[0,0,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vpor %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_26_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %ymm1, %ymm1, %ymm1
; AVX2-NEXT:    vpshufb %ymm1, %ymm0, %ymm1
; AVX2-NEXT:    vperm2i128 {{.*}} # ymm0 = ymm0[2,3,0,1]
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[u,u,u,u,u,10,u,u,u,u,u,u,u,u,u,u,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16]
; AVX2-NEXT:    vmovdqa {{.*}} # ymm2 = [128,128,128,128,128,0,128,128,128,128,128,128,128,128,128,128,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 26, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_27_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_27_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
; AVX1-NEXT:    vpshufb {{.*}} # xmm2 = zero,zero,zero,zero,xmm2[11],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0],zero,xmm0[0,0,0,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vpor %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_27_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %ymm1, %ymm1, %ymm1
; AVX2-NEXT:    vpshufb %ymm1, %ymm0, %ymm1
; AVX2-NEXT:    vperm2i128 {{.*}} # ymm0 = ymm0[2,3,0,1]
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[u,u,u,u,11,u,u,u,u,u,u,u,u,u,u,u,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16]
; AVX2-NEXT:    vmovdqa {{.*}} # ymm2 = [128,128,128,128,0,128,128,128,128,128,128,128,128,128,128,128,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 27, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_28_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_28_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
; AVX1-NEXT:    vpshufb {{.*}} # xmm2 = zero,zero,zero,xmm2[12],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0],zero,xmm0[0,0,0,0,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vpor %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_28_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %ymm1, %ymm1, %ymm1
; AVX2-NEXT:    vpshufb %ymm1, %ymm0, %ymm1
; AVX2-NEXT:    vperm2i128 {{.*}} # ymm0 = ymm0[2,3,0,1]
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[u,u,u,12,u,u,u,u,u,u,u,u,u,u,u,u,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16]
; AVX2-NEXT:    vmovdqa {{.*}} # ymm2 = [128,128,128,0,128,128,128,128,128,128,128,128,128,128,128,128,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 28, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_29_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_29_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
; AVX1-NEXT:    vpshufb {{.*}} # xmm2 = zero,zero,xmm2[13],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0],zero,xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vpor %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_29_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %ymm1, %ymm1, %ymm1
; AVX2-NEXT:    vpshufb %ymm1, %ymm0, %ymm1
; AVX2-NEXT:    vperm2i128 {{.*}} # ymm0 = ymm0[2,3,0,1]
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[u,u,13,u,u,u,u,u,u,u,u,u,u,u,u,u,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16]
; AVX2-NEXT:    vmovdqa {{.*}} # ymm2 = [128,128,0,128,128,128,128,128,128,128,128,128,128,128,128,128,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
  %shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 29, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
  ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_30_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_30_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
; AVX1-NEXT:    vpshufb {{.*}} # xmm2 = zero,xmm2[14],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0],zero,xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vpor %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_30_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %ymm1, %ymm1, %ymm1
; AVX2-NEXT:    vpshufb %ymm1, %ymm0, %ymm1
; AVX2-NEXT:    vperm2i128 {{.*}} # ymm0 = ymm0[2,3,0,1]
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[u,14,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16]
; AVX2-NEXT:    vmovdqa {{.*}} # ymm2 = [128,0,128,128,128,128,128,128,128,128,128,128,128,128,128,128,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 30, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_31_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_31_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    movl $128, %eax
; AVX1-NEXT:    vmovd %eax, %xmm2
; AVX1-NEXT:    vpshufb %xmm2, %xmm0, %xmm2
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm0
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX1-NEXT:    vpor %xmm0, %xmm2, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_31_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vperm2i128 {{.*}} # ymm1 = ymm0[2,3,0,1]
; AVX2-NEXT:    movl $15, %eax
; AVX2-NEXT:    vmovd %eax, %xmm2
; AVX2-NEXT:    vpxor %ymm3, %ymm3, %ymm3
; AVX2-NEXT:    vinserti128 $0, %xmm2, %ymm3, %ymm2
; AVX2-NEXT:    vpshufb %ymm2, %ymm1, %ymm1
; AVX2-NEXT:    vpshufb %ymm3, %ymm0, %ymm0
; AVX2-NEXT:    vmovdqa {{.*}} # ymm2 = [0,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vpblendvb %ymm2, %ymm1, %ymm0, %ymm0
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 31, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
ret <32 x i8> %shuffle
}

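; The masks in this group keep every result byte within its own 128-bit lane,
; so AVX2 needs only a single ymm vpshufb; AVX1 applies the same xmm shuffle to
; each half and recombines the halves with vinsertf128.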
define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_16_16_16_16_16_16_16_16_16_16_16_16_16_16_16_16(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_16_16_16_16_16_16_16_16_16_16_16_16_16_16_16_16
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
; AVX1-NEXT:    vpxor %xmm2, %xmm2, %xmm2
; AVX1-NEXT:    vpshufb %xmm2, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_16_16_16_16_16_16_16_16_16_16_16_16_16_16_16_16
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %ymm1, %ymm1, %ymm1
; AVX2-NEXT:    vpshufb %ymm1, %ymm0, %ymm0
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_15_15_15_15_15_15_15_15_15_15_15_15_15_15_15_15_31_31_31_31_31_31_31_31_31_31_31_31_31_31_31_31(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_15_15_15_15_15_15_15_15_15_15_15_15_15_15_15_15_31_31_31_31_31_31_31_31_31_31_31_31_31_31_31_31
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
; AVX1-NEXT:    vmovdqa {{.*}} # xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT:    vpshufb %xmm2, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_15_15_15_15_15_15_15_15_15_15_15_15_15_15_15_15_31_31_31_31_31_31_31_31_31_31_31_31_31_31_31_31
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_08_08_08_08_08_08_08_08_16_16_16_16_16_16_16_16_24_24_24_24_24_24_24_24(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_08_08_08_08_08_08_08_08_16_16_16_16_16_16_16_16_24_24_24_24_24_24_24_24
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
; AVX1-NEXT:    vmovdqa {{.*}} # xmm2 = [0,0,0,0,0,0,0,0,8,8,8,8,8,8,8,8]
; AVX1-NEXT:    vpshufb %xmm2, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_08_08_08_08_08_08_08_08_16_16_16_16_16_16_16_16_24_24_24_24_24_24_24_24
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[0,0,0,0,0,0,0,0,8,8,8,8,8,8,8,8,16,16,16,16,16,16,16,16,24,24,24,24,24,24,24,24]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_07_07_07_07_07_07_07_07_15_15_15_15_15_15_15_15_23_23_23_23_23_23_23_23_31_31_31_31_31_31_31_31(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_07_07_07_07_07_07_07_07_15_15_15_15_15_15_15_15_23_23_23_23_23_23_23_23_31_31_31_31_31_31_31_31
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
; AVX1-NEXT:    vmovdqa {{.*}} # xmm2 = [7,7,7,7,7,7,7,7,15,15,15,15,15,15,15,15]
; AVX1-NEXT:    vpshufb %xmm2, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_07_07_07_07_07_07_07_07_15_15_15_15_15_15_15_15_23_23_23_23_23_23_23_23_31_31_31_31_31_31_31_31
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[7,7,7,7,7,7,7,7,15,15,15,15,15,15,15,15,23,23,23,23,23,23,23,23,31,31,31,31,31,31,31,31]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 23, i32 23, i32 23, i32 23, i32 23, i32 23, i32 23, i32 23, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_04_04_04_04_08_08_08_08_12_12_12_12_16_16_16_16_20_20_20_20_24_24_24_24_28_28_28_28(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_04_04_04_04_08_08_08_08_12_12_12_12_16_16_16_16_20_20_20_20_24_24_24_24_28_28_28_28
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
; AVX1-NEXT:    vmovdqa {{.*}} # xmm2 = [0,0,0,0,4,4,4,4,8,8,8,8,12,12,12,12]
; AVX1-NEXT:    vpshufb %xmm2, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_04_04_04_04_08_08_08_08_12_12_12_12_16_16_16_16_20_20_20_20_24_24_24_24_28_28_28_28
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[0,0,0,0,4,4,4,4,8,8,8,8,12,12,12,12,16,16,16,16,20,20,20,20,24,24,24,24,28,28,28,28]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 4, i32 4, i32 4, i32 4, i32 8, i32 8, i32 8, i32 8, i32 12, i32 12, i32 12, i32 12, i32 16, i32 16, i32 16, i32 16, i32 20, i32 20, i32 20, i32 20, i32 24, i32 24, i32 24, i32 24, i32 28, i32 28, i32 28, i32 28>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_03_03_03_03_07_07_07_07_11_11_11_11_15_15_15_15_19_19_19_19_23_23_23_23_27_27_27_27_31_31_31_31(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_03_03_03_03_07_07_07_07_11_11_11_11_15_15_15_15_19_19_19_19_23_23_23_23_27_27_27_27_31_31_31_31
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
; AVX1-NEXT:    vmovdqa {{.*}} # xmm2 = [3,3,3,3,7,7,7,7,11,11,11,11,15,15,15,15]
; AVX1-NEXT:    vpshufb %xmm2, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_03_03_03_03_07_07_07_07_11_11_11_11_15_15_15_15_19_19_19_19_23_23_23_23_27_27_27_27_31_31_31_31
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[3,3,3,3,7,7,7,7,11,11,11,11,15,15,15,15,19,19,19,19,23,23,23,23,27,27,27,27,31,31,31,31]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 3, i32 3, i32 3, i32 3, i32 7, i32 7, i32 7, i32 7, i32 11, i32 11, i32 11, i32 11, i32 15, i32 15, i32 15, i32 15, i32 19, i32 19, i32 19, i32 19, i32 23, i32 23, i32 23, i32 23, i32 27, i32 27, i32 27, i32 27, i32 31, i32 31, i32 31, i32 31>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_02_02_04_04_06_06_08_08_10_10_12_12_14_14_16_16_18_18_20_20_22_22_24_24_26_26_28_28_30_30(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_02_02_04_04_06_06_08_08_10_10_12_12_14_14_16_16_18_18_20_20_22_22_24_24_26_26_28_28_30_30
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
; AVX1-NEXT:    vmovdqa {{.*}} # xmm2 = [0,0,2,2,4,4,6,6,8,8,10,10,12,12,14,14]
; AVX1-NEXT:    vpshufb %xmm2, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_02_02_04_04_06_06_08_08_10_10_12_12_14_14_16_16_18_18_20_20_22_22_24_24_26_26_28_28_30_30
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[0,0,2,2,4,4,6,6,8,8,10,10,12,12,14,14,16,16,18,18,20,20,22,22,24,24,26,26,28,28,30,30]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 2, i32 2, i32 4, i32 4, i32 6, i32 6, i32 8, i32 8, i32 10, i32 10, i32 12, i32 12, i32 14, i32 14, i32 16, i32 16, i32 18, i32 18, i32 20, i32 20, i32 22, i32 22, i32 24, i32 24, i32 26, i32 26, i32 28, i32 28, i32 30, i32 30>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_01_01_03_03_05_05_07_07_09_09_11_11_13_13_15_15_17_17_19_19_21_21_23_23_25_25_27_27_29_29_31_31(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_01_01_03_03_05_05_07_07_09_09_11_11_13_13_15_15_17_17_19_19_21_21_23_23_25_25_27_27_29_29_31_31
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
; AVX1-NEXT:    vmovdqa {{.*}} # xmm2 = [1,1,3,3,5,5,7,7,9,9,11,11,13,13,15,15]
; AVX1-NEXT:    vpshufb %xmm2, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_01_01_03_03_05_05_07_07_09_09_11_11_13_13_15_15_17_17_19_19_21_21_23_23_25_25_27_27_29_29_31_31
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[1,1,3,3,5,5,7,7,9,9,11,11,13,13,15,15,17,17,19,19,21,21,23,23,25,25,27,27,29,29,31,31]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 1, i32 1, i32 3, i32 3, i32 5, i32 5, i32 7, i32 7, i32 9, i32 9, i32 11, i32 11, i32 13, i32 13, i32 15, i32 15, i32 17, i32 17, i32 19, i32 19, i32 21, i32 21, i32 23, i32 23, i32 25, i32 25, i32 27, i32 27, i32 29, i32 29, i32 31, i32 31>
ret <32 x i8> %shuffle
}

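; These masks repeat the same low-half pattern in both 128-bit halves, so a
; single xmm vpshufb result can simply be inserted into both lanes of the ymm.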
define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_01_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_01_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_01_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_01_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_01_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_01_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0]
; AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 0>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_02_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_02_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_02_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_02_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_02_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_02_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0]
; AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 2, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 2, i32 0, i32 0>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_07_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_07_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_07_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_07_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,7,0,0,0,0,0,0,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_07_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_07_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,7,0,0,0,0,0,0,0]
; AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 7, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 7, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_08_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_08_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_08_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_08_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,8,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_08_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_08_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,8,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 8, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 8, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_14_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_14_00_00_00_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_14_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_14_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_14_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_14_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 14, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 14, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_15_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_15_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_15_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_15_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX1:       # BB#0:
; AVX1-NEXT:    movl $15, %eax
; AVX1-NEXT:    vmovd %eax, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_15_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_15_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00
; AVX2:       # BB#0:
; AVX2-NEXT:    movl $15, %eax
; AVX2-NEXT:    vmovd %eax, %xmm1
; AVX2-NEXT:    vpshufb %xmm1, %xmm0, %xmm0
; AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 15, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 15, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
ret <32 x i8> %shuffle
}

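; Two-input interleaves of alternating bytes: AVX2 folds each of these into a
; single vpblendvb against a constant mask, while AVX1 has to shuffle and
; unpack each 128-bit half separately before recombining the halves.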
define <32 x i8> @shuffle_v32i8_00_33_02_35_04_37_06_39_08_41_10_43_12_45_14_47_16_49_18_51_20_53_22_55_24_57_26_59_28_61_30_63(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_33_02_35_04_37_06_39_08_41_10_43_12_45_14_47_16_49_18_51_20_53_22_55_24_57_26_59_28_61_30_63
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm2
; AVX1-NEXT:    vmovdqa {{.*}} # xmm3 = <1,3,5,7,9,11,13,15,u,u,u,u,u,u,u,u>
; AVX1-NEXT:    vpshufb %xmm3, %xmm2, %xmm2
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm4
; AVX1-NEXT:    vmovdqa {{.*}} # xmm5 = <0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u>
; AVX1-NEXT:    vpshufb %xmm5, %xmm4, %xmm4
; AVX1-NEXT:    vpunpcklbw {{.*}} # xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3],xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
; AVX1-NEXT:    vpshufb %xmm3, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm5, %xmm0, %xmm0
; AVX1-NEXT:    vpunpcklbw {{.*}} # xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
; AVX1-NEXT:    vinsertf128 $1, %xmm2, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_33_02_35_04_37_06_39_08_41_10_43_12_45_14_47_16_49_18_51_20_53_22_55_24_57_26_59_28_61_30_63
; AVX2:       # BB#0:
; AVX2-NEXT:    vmovdqa {{.*}} # ymm2 = [0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128]
; AVX2-NEXT:    vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 33, i32 2, i32 35, i32 4, i32 37, i32 6, i32 39, i32 8, i32 41, i32 10, i32 43, i32 12, i32 45, i32 14, i32 47, i32 16, i32 49, i32 18, i32 51, i32 20, i32 53, i32 22, i32 55, i32 24, i32 57, i32 26, i32 59, i32 28, i32 61, i32 30, i32 63>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_32_01_34_03_36_05_38_07_40_09_42_11_44_13_46_15_48_17_50_19_52_21_54_23_56_25_58_27_60_29_62_31(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_32_01_34_03_36_05_38_07_40_09_42_11_44_13_46_15_48_17_50_19_52_21_54_23_56_25_58_27_60_29_62_31
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
; AVX1-NEXT:    vmovdqa {{.*}} # xmm3 = <1,3,5,7,9,11,13,15,u,u,u,u,u,u,u,u>
; AVX1-NEXT:    vpshufb %xmm3, %xmm2, %xmm2
; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm4
; AVX1-NEXT:    vmovdqa {{.*}} # xmm5 = <0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u>
; AVX1-NEXT:    vpshufb %xmm5, %xmm4, %xmm4
; AVX1-NEXT:    vpunpcklbw {{.*}} # xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3],xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
; AVX1-NEXT:    vpshufb %xmm3, %xmm0, %xmm0
; AVX1-NEXT:    vpshufb %xmm5, %xmm1, %xmm1
; AVX1-NEXT:    vpunpcklbw {{.*}} # xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
; AVX1-NEXT:    vinsertf128 $1, %xmm2, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_32_01_34_03_36_05_38_07_40_09_42_11_44_13_46_15_48_17_50_19_52_21_54_23_56_25_58_27_60_29_62_31
; AVX2:       # BB#0:
; AVX2-NEXT:    vmovdqa {{.*}} # ymm2 = [0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128]
; AVX2-NEXT:    vpblendvb %ymm2, %ymm1, %ymm0, %ymm0
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 32, i32 1, i32 34, i32 3, i32 36, i32 5, i32 38, i32 7, i32 40, i32 9, i32 42, i32 11, i32 44, i32 13, i32 46, i32 15, i32 48, i32 17, i32 50, i32 19, i32 52, i32 21, i32 54, i32 23, i32 56, i32 25, i32 58, i32 27, i32 60, i32 29, i32 62, i32 31>
ret <32 x i8> %shuffle
}

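; The shuffles below mix bytes from both operands in less regular ways,
; exercising cross-operand broadcasts, reversals, and word/dword blends.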
define <32 x i8> @shuffle_v32i8_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32
; AVX1:       # BB#0:
; AVX1-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm0
; AVX1-NEXT:    vpunpcklbw {{.*}} # xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT:    vpshufb %xmm1, %xmm0, %xmm0
; AVX2-NEXT:    vpunpcklbw {{.*}} # xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 32, i32 0, i32 32, i32 0, i32 32, i32 0, i32 32, i32 0, i32 32, i32 0, i32 32, i32 0, i32 32, i32 0, i32 32, i32 0, i32 32, i32 0, i32 32, i32 0, i32 32, i32 0, i32 32, i32 0, i32 32, i32 0, i32 32, i32 0, i32 32, i32 0, i32 32>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_16_48_16_48_16_48_16_48_16_48_16_48_16_48_16_48(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_16_48_16_48_16_48_16_48_16_48_16_48_16_48_16_48
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
; AVX1-NEXT:    vpxor %xmm2, %xmm2, %xmm2
; AVX1-NEXT:    vpshufb %xmm2, %xmm1, %xmm1
; AVX1-NEXT:    vpunpcklbw {{.*}} # xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
; AVX1-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vpunpcklbw {{.*}} # xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_32_00_32_00_32_00_32_00_32_00_32_00_32_00_32_16_48_16_48_16_48_16_48_16_48_16_48_16_48_16_48
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %ymm2, %ymm2, %ymm2
; AVX2-NEXT:    vpshufb %ymm2, %ymm1, %ymm1
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,16,17,16,17,16,17,16,17,16,17,16,17,16,17,16,17]
; AVX2-NEXT:    vmovdqa {{.*}} # ymm2 = [0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128]
; AVX2-NEXT:    vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 32, i32 0, i32 32, i32 0, i32 32, i32 0, i32 32, i32 0, i32 32, i32 0, i32 32, i32 0, i32 32, i32 0, i32 32, i32 16, i32 48, i32 16, i32 48, i32 16, i32 48, i32 16, i32 48, i32 16, i32 48, i32 16, i32 48, i32 16, i32 48, i32 16, i32 48>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_32_32_32_32_32_32_32_32_08_09_10_11_12_13_14_15_48_48_48_48_48_48_48_48_24_25_26_27_28_29_30_31(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_32_32_32_32_32_32_32_32_08_09_10_11_12_13_14_15_48_48_48_48_48_48_48_48_24_25_26_27_28_29_30_31
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm3
; AVX1-NEXT:    vpunpcklbw {{.*}} # xmm3 = xmm3[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX1-NEXT:    vpshuflw {{.*}} # xmm3 = xmm3[0,0,0,0,4,5,6,7]
; AVX1-NEXT:    vpblendw {{.*}} # xmm2 = xmm3[0,1,2,3],xmm2[4,5,6,7]
; AVX1-NEXT:    vpunpcklbw {{.*}} # xmm1 = xmm1[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX1-NEXT:    vpshuflw {{.*}} # xmm1 = xmm1[0,0,0,0,4,5,6,7]
; AVX1-NEXT:    vpblendw {{.*}} # xmm0 = xmm1[0,1,2,3],xmm0[4,5,6,7]
; AVX1-NEXT:    vinsertf128 $1, %xmm2, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_32_32_32_32_32_32_32_32_08_09_10_11_12_13_14_15_48_48_48_48_48_48_48_48_24_25_26_27_28_29_30_31
; AVX2:       # BB#0:
; AVX2-NEXT:    vpxor %ymm2, %ymm2, %ymm2
; AVX2-NEXT:    vpshufb %ymm2, %ymm1, %ymm1
; AVX2-NEXT:    vpblendd {{.*}} # ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 32, i32 32, i32 32, i32 32, i32 32, i32 32, i32 32, i32 32, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 48, i32 48, i32 48, i32 48, i32 48, i32 48, i32 48, i32 48, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_39_38_37_36_35_34_33_32_15_14_13_12_11_10_09_08_55_54_53_52_51_50_49_48_31_30_29_28_27_26_25_24(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_39_38_37_36_35_34_33_32_15_14_13_12_11_10_09_08_55_54_53_52_51_50_49_48_31_30_29_28_27_26_25_24
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
; AVX1-NEXT:    vmovdqa {{.*}} # xmm3 = <u,u,u,u,u,u,u,u,15,14,13,12,11,10,9,8>
; AVX1-NEXT:    vpshufb %xmm3, %xmm2, %xmm2
; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm4
; AVX1-NEXT:    vmovdqa {{.*}} # xmm5 = <7,6,5,4,3,2,1,0,u,u,u,u,u,u,u,u>
; AVX1-NEXT:    vpshufb %xmm5, %xmm4, %xmm4
; AVX1-NEXT:    vpblendw {{.*}} # xmm2 = xmm4[0,1,2,3],xmm2[4,5,6,7]
; AVX1-NEXT:    vpshufb %xmm3, %xmm0, %xmm0
; AVX1-NEXT:    vpshufb %xmm5, %xmm1, %xmm1
; AVX1-NEXT:    vpblendw {{.*}} # xmm0 = xmm1[0,1,2,3],xmm0[4,5,6,7]
; AVX1-NEXT:    vinsertf128 $1, %xmm2, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_39_38_37_36_35_34_33_32_15_14_13_12_11_10_09_08_55_54_53_52_51_50_49_48_31_30_29_28_27_26_25_24
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[u,u,u,u,u,u,u,u,15,14,13,12,11,10,9,8,u,u,u,u,u,u,u,u,31,30,29,28,27,26,25,24]
; AVX2-NEXT:    vpshufb {{.*}} # ymm1 = ymm1[7,6,5,4,3,2,1,0,u,u,u,u,u,u,u,u,23,22,21,20,19,18,17,16,u,u,u,u,u,u,u,u]
; AVX2-NEXT:    vpblendd {{.*}} # ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 39, i32 38, i32 37, i32 36, i32 35, i32 34, i32 33, i32 32, i32 15, i32 14, i32 13, i32 12, i32 11, i32 10, i32 9, i32 8, i32 55, i32 54, i32 53, i32 52, i32 51, i32 50, i32 49, i32 48, i32 31, i32 30, i32 29, i32 28, i32 27, i32 26, i32 25, i32 24>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_39_38_37_36_35_34_33_32_07_06_05_04_03_02_01_00_55_54_53_52_51_50_49_48_23_22_21_20_19_18_17_16(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_39_38_37_36_35_34_33_32_07_06_05_04_03_02_01_00_55_54_53_52_51_50_49_48_23_22_21_20_19_18_17_16
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
; AVX1-NEXT:    vmovdqa {{.*}} # xmm3 = <u,u,u,u,u,u,u,u,7,6,5,4,3,2,1,0>
; AVX1-NEXT:    vpshufb %xmm3, %xmm2, %xmm2
; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm4
; AVX1-NEXT:    vmovdqa {{.*}} # xmm5 = <7,6,5,4,3,2,1,0,u,u,u,u,u,u,u,u>
; AVX1-NEXT:    vpshufb %xmm5, %xmm4, %xmm4
; AVX1-NEXT:    vpblendw {{.*}} # xmm2 = xmm4[0,1,2,3],xmm2[4,5,6,7]
; AVX1-NEXT:    vpshufb %xmm3, %xmm0, %xmm0
; AVX1-NEXT:    vpshufb %xmm5, %xmm1, %xmm1
; AVX1-NEXT:    vpblendw {{.*}} # xmm0 = xmm1[0,1,2,3],xmm0[4,5,6,7]
; AVX1-NEXT:    vinsertf128 $1, %xmm2, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_39_38_37_36_35_34_33_32_07_06_05_04_03_02_01_00_55_54_53_52_51_50_49_48_23_22_21_20_19_18_17_16
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[u,u,u,u,u,u,u,u,7,6,5,4,3,2,1,0,u,u,u,u,u,u,u,u,23,22,21,20,19,18,17,16]
; AVX2-NEXT:    vpshufb {{.*}} # ymm1 = ymm1[7,6,5,4,3,2,1,0,u,u,u,u,u,u,u,u,23,22,21,20,19,18,17,16,u,u,u,u,u,u,u,u]
; AVX2-NEXT:    vpblendd {{.*}} # ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 39, i32 38, i32 37, i32 36, i32 35, i32 34, i32 33, i32 32, i32 7, i32 6, i32 5, i32 4, i32 3, i32 2, i32 1, i32 0, i32 55, i32 54, i32 53, i32 52, i32 51, i32 50, i32 49, i32 48, i32 23, i32 22, i32 21, i32 20, i32 19, i32 18, i32 17, i32 16>
ret <32 x i8> %shuffle
}

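; Single-input masks whose high-lane half repeats the low-lane half at an
; offset of 16 collapse to one ymm vpshufb on AVX2; AVX1 reuses the same xmm
; shuffle control for both halves.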
define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_01_00_16_16_16_16_16_16_16_16_16_16_16_16_16_16_17_16(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_01_00_16_16_16_16_16_16_16_16_16_16_16_16_16_16_17_16
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
; AVX1-NEXT:    vmovdqa {{.*}} # xmm2 = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0]
; AVX1-NEXT:    vpshufb %xmm2, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_01_00_16_16_16_16_16_16_16_16_16_16_16_16_16_16_17_16
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,16,16,16,16,16,16,16,16,16,16,16,16,16,16,17,16]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 0, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 17, i32 16>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_02_00_00_16_16_16_16_16_16_16_16_16_16_16_16_16_18_16_16(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_02_00_00_16_16_16_16_16_16_16_16_16_16_16_16_16_18_16_16
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
; AVX1-NEXT:    vmovdqa {{.*}} # xmm2 = [0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0]
; AVX1-NEXT:    vpshufb %xmm2, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_02_00_00_16_16_16_16_16_16_16_16_16_16_16_16_16_18_16_16
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,16,16,16,16,16,16,16,16,16,16,16,16,16,18,16,16]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 2, i32 0, i32 0, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 18, i32 16, i32 16>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_07_00_00_00_00_00_00_00_16_16_16_16_16_16_16_16_23_16_16_16_16_16_16_16(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_07_00_00_00_00_00_00_00_16_16_16_16_16_16_16_16_23_16_16_16_16_16_16_16
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
; AVX1-NEXT:    vmovdqa {{.*}} # xmm2 = [0,0,0,0,0,0,0,0,7,0,0,0,0,0,0,0]
; AVX1-NEXT:    vpshufb %xmm2, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_07_00_00_00_00_00_00_00_16_16_16_16_16_16_16_16_23_16_16_16_16_16_16_16
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[0,0,0,0,0,0,0,0,7,0,0,0,0,0,0,0,16,16,16,16,16,16,16,16,23,16,16,16,16,16,16,16]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 7, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 23, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_08_00_00_00_00_00_00_00_00_16_16_16_16_16_16_16_24_16_16_16_16_16_16_16_16(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_08_00_00_00_00_00_00_00_00_16_16_16_16_16_16_16_24_16_16_16_16_16_16_16_16
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
; AVX1-NEXT:    vmovdqa {{.*}} # xmm2 = [0,0,0,0,0,0,0,8,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vpshufb %xmm2, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_08_00_00_00_00_00_00_00_00_16_16_16_16_16_16_16_24_16_16_16_16_16_16_16_16
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[0,0,0,0,0,0,0,8,0,0,0,0,0,0,0,0,16,16,16,16,16,16,16,24,16,16,16,16,16,16,16,16]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 8, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 24, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_14_00_00_00_00_00_00_00_00_00_00_00_00_00_00_16_30_16_16_16_16_16_16_16_16_16_16_16_16_16_16(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_14_00_00_00_00_00_00_00_00_00_00_00_00_00_00_16_30_16_16_16_16_16_16_16_16_16_16_16_16_16_16
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
; AVX1-NEXT:    vmovdqa {{.*}} # xmm2 = [0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vpshufb %xmm2, %xmm1, %xmm1
; AVX1-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_14_00_00_00_00_00_00_00_00_00_00_00_00_00_00_16_30_16_16_16_16_16_16_16_16_16_16_16_16_16_16
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16,30,16,16,16,16,16,16,16,16,16,16,16,16,16,16]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 14, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 16, i32 30, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_15_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_31_16_16_16_16_16_16_16_16_16_16_16_16_16_16_16(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_15_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_31_16_16_16_16_16_16_16_16_16_16_16_16_16_16_16
; AVX1:       # BB#0:
; AVX1-NEXT:    movl $15, %eax
; AVX1-NEXT:    vmovd %eax, %xmm1
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
; AVX1-NEXT:    vpshufb %xmm1, %xmm2, %xmm2
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm0
; AVX1-NEXT:    vinsertf128 $1, %xmm2, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_15_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_31_16_16_16_16_16_16_16_16_16_16_16_16_16_16_16
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,31,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 15, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 31, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
ret <32 x i8> %shuffle
}

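; Byte interleaves that line up with the hardware unpack patterns: AVX2 emits
; a single vpunpcklbw or vpunpckhbw on the full ymm registers.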
define <32 x i8> @shuffle_v32i8_00_32_01_33_02_34_03_35_04_36_05_37_06_38_07_39_16_48_17_49_18_50_19_51_20_52_21_53_22_54_23_55(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_32_01_33_02_34_03_35_04_36_05_37_06_38_07_39_16_48_17_49_18_50_19_51_20_52_21_53_22_54_23_55
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
; AVX1-NEXT:    vpmovzxbw %xmm1, %xmm1
; AVX1-NEXT:    vmovdqa {{.*}} # xmm2 = <0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u>
; AVX1-NEXT:    vpshufb %xmm2, %xmm1, %xmm1
; AVX1-NEXT:    vpunpcklbw {{.*}} # xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
; AVX1-NEXT:    vpmovzxbw %xmm0, %xmm0
; AVX1-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vpunpcklbw {{.*}} # xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_32_01_33_02_34_03_35_04_36_05_37_06_38_07_39_16_48_17_49_18_50_19_51_20_52_21_53_22_54_23_55
; AVX2:       # BB#0:
; AVX2-NEXT:    vpunpcklbw {{.*}} # ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[4],ymm1[4],ymm0[5],ymm1[5],ymm0[6],ymm1[6],ymm0[7],ymm1[7],ymm0[16],ymm1[16],ymm0[17],ymm1[17],ymm0[18],ymm1[18],ymm0[19],ymm1[19],ymm0[20],ymm1[20],ymm0[21],ymm1[21],ymm0[22],ymm1[22],ymm0[23],ymm1[23]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 32, i32 1, i32 33, i32 2, i32 34, i32 3, i32 35, i32 4, i32 36, i32 5, i32 37, i32 6, i32 38, i32 7, i32 39, i32 16, i32 48, i32 17, i32 49, i32 18, i32 50, i32 19, i32 51, i32 20, i32 52, i32 21, i32 53, i32 22, i32 54, i32 23, i32 55>
ret <32 x i8> %shuffle
}

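; The same interleave taken from the high eight bytes of each lane, matching
; vpunpckhbw per lane; AVX2 should again fold this into one instruction.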
define <32 x i8> @shuffle_v32i8_08_40_09_41_10_42_11_43_12_44_13_45_14_46_15_47_24_56_25_57_26_58_27_59_28_60_29_61_30_62_31_63(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_08_40_09_41_10_42_11_43_12_44_13_45_14_46_15_47_24_56_25_57_26_58_27_59_28_60_29_61_30_62_31_63
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
; AVX1-NEXT:    vmovdqa {{.*}} # xmm2 = <8,9,10,11,12,13,14,15,u,u,u,u,u,u,u,u>
; AVX1-NEXT:    vpshufb %xmm2, %xmm1, %xmm1
; AVX1-NEXT:    vpunpcklbw {{.*}} # xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
; AVX1-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vpunpcklbw {{.*}} # xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_08_40_09_41_10_42_11_43_12_44_13_45_14_46_15_47_24_56_25_57_26_58_27_59_28_60_29_61_30_62_31_63
; AVX2:       # BB#0:
; AVX2-NEXT:    vpunpckhbw {{.*}} # ymm0 = ymm0[8],ymm1[8],ymm0[9],ymm1[9],ymm0[10],ymm1[10],ymm0[11],ymm1[11],ymm0[12],ymm1[12],ymm0[13],ymm1[13],ymm0[14],ymm1[14],ymm0[15],ymm1[15],ymm0[24],ymm1[24],ymm0[25],ymm1[25],ymm0[26],ymm1[26],ymm0[27],ymm1[27],ymm0[28],ymm1[28],ymm0[29],ymm1[29],ymm0[30],ymm1[30],ymm0[31],ymm1[31]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 8, i32 40, i32 9, i32 41, i32 10, i32 42, i32 11, i32 43, i32 12, i32 44, i32 13, i32 45, i32 14, i32 46, i32 15, i32 47, i32 24, i32 56, i32 25, i32 57, i32 26, i32 58, i32 27, i32 59, i32 28, i32 60, i32 29, i32 61, i32 30, i32 62, i32 31, i32 63>
ret <32 x i8> %shuffle
}

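; Now the two lanes ask for different interleaves (unpack-low in lane 0,
; unpack-high in lane 1), so no single vpunpck* matches. The expected AVX2
; lowering shuffles each source in place with vpshufb and merges them with
; vpblendvb under an alternating byte mask.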
define <32 x i8> @shuffle_v32i8_00_32_01_33_02_34_03_35_04_36_05_37_06_38_07_39_24_56_25_57_26_58_27_59_28_60_29_61_30_62_31_63(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_32_01_33_02_34_03_35_04_36_05_37_06_38_07_39_24_56_25_57_26_58_27_59_28_60_29_61_30_62_31_63
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
; AVX1-NEXT:    vpshufb {{.*}} # xmm1 = xmm1[8,9,10,11,12,13,14,15,u,u,u,u,u,u,u,u]
; AVX1-NEXT:    vpunpcklbw {{.*}} # xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
; AVX1-NEXT:    vpmovzxbw %xmm0, %xmm0
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u]
; AVX1-NEXT:    vpunpcklbw {{.*}} # xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_32_01_33_02_34_03_35_04_36_05_37_06_38_07_39_24_56_25_57_26_58_27_59_28_60_29_61_30_62_31_63
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[0,u,1,u,2,u,3,u,4,u,5,u,6,u,7,u,24,u,25,u,26,u,27,u,28,u,29,u,30,u,31,u]
; AVX2-NEXT:    vpshufb {{.*}} # ymm1 = ymm1[u,0,u,1,u,2,u,3,u,4,u,5,u,6,u,7,u,24,u,25,u,26,u,27,u,28,u,29,u,30,u,31]
; AVX2-NEXT:    vmovdqa {{.*}} # ymm2 = [0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128]
; AVX2-NEXT:    vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 32, i32 1, i32 33, i32 2, i32 34, i32 3, i32 35, i32 4, i32 36, i32 5, i32 37, i32 6, i32 38, i32 7, i32 39, i32 24, i32 56, i32 25, i32 57, i32 26, i32 58, i32 27, i32 59, i32 28, i32 60, i32 29, i32 61, i32 30, i32 62, i32 31, i32 63>
ret <32 x i8> %shuffle
}

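; The mirror image of the previous case: unpack-high in lane 0 and unpack-low
; in lane 1, lowered on AVX2 through the same vpshufb + vpblendvb sequence.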
define <32 x i8> @shuffle_v32i8_08_40_09_41_10_42_11_43_12_44_13_45_14_46_15_47_16_48_17_49_18_50_19_51_20_52_21_53_22_54_23_55(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_08_40_09_41_10_42_11_43_12_44_13_45_14_46_15_47_16_48_17_49_18_50_19_51_20_52_21_53_22_54_23_55
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
; AVX1-NEXT:    vpmovzxbw %xmm1, %xmm1
; AVX1-NEXT:    vpshufb {{.*}} # xmm1 = xmm1[0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u]
; AVX1-NEXT:    vpunpcklbw {{.*}} # xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[8,9,10,11,12,13,14,15,u,u,u,u,u,u,u,u]
; AVX1-NEXT:    vpunpcklbw {{.*}} # xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_08_40_09_41_10_42_11_43_12_44_13_45_14_46_15_47_16_48_17_49_18_50_19_51_20_52_21_53_22_54_23_55
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[8,u,9,u,10,u,11,u,12,u,13,u,14,u,15,u,16,u,17,u,18,u,19,u,20,u,21,u,22,u,23,u]
; AVX2-NEXT:    vpshufb {{.*}} # ymm1 = ymm1[u,8,u,9,u,10,u,11,u,12,u,13,u,14,u,15,u,16,u,17,u,18,u,19,u,20,u,21,u,22,u,23]
; AVX2-NEXT:    vmovdqa {{.*}} # ymm2 = [0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128,0,128]
; AVX2-NEXT:    vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 8, i32 40, i32 9, i32 41, i32 10, i32 42, i32 11, i32 43, i32 12, i32 44, i32 13, i32 45, i32 14, i32 46, i32 15, i32 47, i32 16, i32 48, i32 17, i32 49, i32 18, i32 50, i32 19, i32 51, i32 20, i32 52, i32 21, i32 53, i32 22, i32 54, i32 23, i32 55>
ret <32 x i8> %shuffle
}

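; Back to single-source splats, but now each 128-bit lane has its own displaced
; byte (element 1) in a different position. Everything stays in-lane, so AVX2 is
; expected to use one 256-bit vpshufb while AVX1 needs a separate control for
; each half.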
define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_01_00_16_17_16_16_16_16_16_16_16_16_16_16_16_16_16_16(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_01_00_16_17_16_16_16_16_16_16_16_16_16_16_16_16_16_16
; AVX1:       # BB#0:
; AVX1-NEXT:    vpshufb {{.*}} # xmm1 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0]
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm0
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_00_01_00_16_17_16_16_16_16_16_16_16_16_16_16_16_16_16_16
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,16,17,16,16,16,16,16,16,16,16,16,16,16,16,16,16]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 0, i32 16, i32 17, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
ret <32 x i8> %shuffle
}

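; The next four cases vary which byte is displaced (element 2, 7, 8, or 14) and
; where it lands within each lane; none of them cross a 128-bit boundary, so the
; expected AVX2 output remains a single in-lane vpshufb of the whole ymm register.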
define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_02_00_00_16_16_18_16_16_16_16_16_16_16_16_16_16_16_16_16(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_02_00_00_16_16_18_16_16_16_16_16_16_16_16_16_16_16_16_16
; AVX1:       # BB#0:
; AVX1-NEXT:    vpshufb {{.*}} # xmm1 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0]
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm0
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_00_00_00_00_00_02_00_00_16_16_18_16_16_16_16_16_16_16_16_16_16_16_16_16
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,16,16,18,16,16,16,16,16,16,16,16,16,16,16,16,16]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 2, i32 0, i32 0, i32 16, i32 16, i32 18, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_00_07_00_00_00_00_00_00_00_16_16_16_16_16_16_16_23_16_16_16_16_16_16_16_16(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_07_00_00_00_00_00_00_00_16_16_16_16_16_16_16_23_16_16_16_16_16_16_16_16
; AVX1:       # BB#0:
; AVX1-NEXT:    vpshufb {{.*}} # xmm1 = xmm0[0,0,0,0,0,0,0,0,7,0,0,0,0,0,0,0]
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm0
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,7,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_00_07_00_00_00_00_00_00_00_16_16_16_16_16_16_16_23_16_16_16_16_16_16_16_16
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[0,0,0,0,0,0,0,0,7,0,0,0,0,0,0,0,16,16,16,16,16,16,16,23,16,16,16,16,16,16,16,16]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 7, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 23, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_00_00_00_00_00_00_08_00_00_00_00_00_00_00_00_16_16_16_16_16_16_16_16_24_16_16_16_16_16_16_16(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_08_00_00_00_00_00_00_00_00_16_16_16_16_16_16_16_16_24_16_16_16_16_16_16_16
; AVX1:       # BB#0:
; AVX1-NEXT:    vpshufb {{.*}} # xmm1 = xmm0[0,0,0,0,0,0,0,8,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm0
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,8,0,0,0,0,0,0,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_00_00_00_08_00_00_00_00_00_00_00_00_16_16_16_16_16_16_16_16_24_16_16_16_16_16_16_16
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[0,0,0,0,0,0,0,8,0,0,0,0,0,0,0,0,16,16,16,16,16,16,16,16,24,16,16,16,16,16,16,16]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 8, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 24, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
ret <32 x i8> %shuffle
}

define <32 x i8> @shuffle_v32i8_00_14_00_00_00_00_00_00_00_00_00_00_00_00_00_00_16_16_16_16_16_16_16_16_16_16_16_16_16_16_30_16(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_14_00_00_00_00_00_00_00_00_00_00_00_00_00_00_16_16_16_16_16_16_16_16_16_16_16_16_16_16_30_16
; AVX1:       # BB#0:
; AVX1-NEXT:    vpshufb {{.*}} # xmm1 = xmm0[0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm0
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0,0,14,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_14_00_00_00_00_00_00_00_00_00_00_00_00_00_00_16_16_16_16_16_16_16_16_16_16_16_16_16_16_30_16
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16,16,16,16,16,16,16,16,16,16,16,16,16,16,30,16]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 14, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 30, i32 16>
ret <32 x i8> %shuffle
}

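; Element 15 leads the low lane while element 31 ends the high lane, so the two
; halves no longer share a pattern: AVX1 again builds the <15,0,...,0> control
; with movl $15 / vmovd for the low half but needs a different vpshufb control
; for the high half; AVX2 still covers both lanes with one vpshufb.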
define <32 x i8> @shuffle_v32i8_15_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_16_16_16_16_16_16_16_16_16_16_16_16_16_16_16_31(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_15_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_16_16_16_16_16_16_16_16_16_16_16_16_16_16_16_31
; AVX1:       # BB#0:
; AVX1-NEXT:    movl $15, %eax
; AVX1-NEXT:    vmovd %eax, %xmm1
; AVX1-NEXT:    vpshufb %xmm1, %xmm0, %xmm1
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm0
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_15_00_00_00_00_00_00_00_00_00_00_00_00_00_00_00_16_16_16_16_16_16_16_16_16_16_16_16_16_16_16_31
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,31]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 15, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 31>
ret <32 x i8> %shuffle
}

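; Broadcast each four-byte group within its lane, with the groups in reverse
; order in the high lane; in-lane again, so the expectation is one ymm vpshufb
; on AVX2 and two mirrored xmm controls on AVX1.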
define <32 x i8> @shuffle_v32i8_00_00_00_00_04_04_04_04_08_08_08_08_12_12_12_12_28_28_28_28_24_24_24_24_20_20_20_20_16_16_16_16(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_00_04_04_04_04_08_08_08_08_12_12_12_12_28_28_28_28_24_24_24_24_20_20_20_20_16_16_16_16
; AVX1:       # BB#0:
; AVX1-NEXT:    vpshufb {{.*}} # xmm1 = xmm0[0,0,0,0,4,4,4,4,8,8,8,8,12,12,12,12]
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm0
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[12,12,12,12,8,8,8,8,4,4,4,4,0,0,0,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_00_04_04_04_04_08_08_08_08_12_12_12_12_28_28_28_28_24_24_24_24_20_20_20_20_16_16_16_16
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[0,0,0,0,4,4,4,4,8,8,8,8,12,12,12,12,28,28,28,28,24,24,24,24,20,20,20,20,16,16,16,16]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 4, i32 4, i32 4, i32 4, i32 8, i32 8, i32 8, i32 8, i32 12, i32 12, i32 12, i32 12, i32 28, i32 28, i32 28, i32 28, i32 24, i32 24, i32 24, i32 24, i32 20, i32 20, i32 20, i32 20, i32 16, i32 16, i32 16, i32 16>
ret <32 x i8> %shuffle
}

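; Each lane splats its bytes 8 and 0 (relative to the lane), but the two lanes
; use opposite orders, so the AVX1 halves again get mirrored vpshufb controls.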
define <32 x i8> @shuffle_v32i8_08_08_08_08_08_08_08_08_00_00_00_00_00_00_00_00_16_16_16_16_16_16_16_16_24_24_24_24_24_24_24_24(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_08_08_08_08_08_08_08_08_00_00_00_00_00_00_00_00_16_16_16_16_16_16_16_16_24_24_24_24_24_24_24_24
; AVX1:       # BB#0:
; AVX1-NEXT:    vpshufb {{.*}} # xmm1 = xmm0[8,8,8,8,8,8,8,8,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm0
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,0,0,0,0,8,8,8,8,8,8,8,8]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_08_08_08_08_08_08_08_08_00_00_00_00_00_00_00_00_16_16_16_16_16_16_16_16_24_24_24_24_24_24_24_24
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[8,8,8,8,8,8,8,8,0,0,0,0,0,0,0,0,16,16,16,16,16,16,16,16,24,24,24,24,24,24,24,24]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24>
ret <32 x i8> %shuffle
}

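; Undef elements start here. Only element 0 of the low half is defined (and it
; stays in place), so the expected AVX1 code leaves the low 128 bits of %ymm0
; untouched and only shuffles the extracted high half before reinserting it.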
define <32 x i8> @shuffle_v32i8_00_uu_uu_uu_uu_uu_uu_uu_uu_uu_uu_uu_uu_uu_uu_uu_16_16_16_16_uu_uu_uu_uu_uu_16_16_16_16_16_30_16(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_uu_uu_uu_uu_uu_uu_uu_uu_uu_uu_uu_uu_uu_uu_uu_16_16_16_16_uu_uu_uu_uu_uu_16_16_16_16_16_30_16
; AVX1:       # BB#0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
; AVX1-NEXT:    vpshufb {{.*}} # xmm1 = xmm1[0,0,0,0,u,u,u,u,u,0,0,0,0,0,14,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_uu_uu_uu_uu_uu_uu_uu_uu_uu_uu_uu_uu_uu_uu_uu_16_16_16_16_uu_uu_uu_uu_uu_16_16_16_16_16_30_16
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[0,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,16,16,16,u,u,u,u,u,16,16,16,16,16,30,16]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 16, i32 16, i32 16, i32 16, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 16, i32 16, i32 16, i32 16, i32 16, i32 30, i32 16>
ret <32 x i8> %shuffle
}

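; Undef result bytes mixed into the splat pattern; the AVX2 expectation leaves
; them as 'u' in the vpshufb control, and AVX1 still splits the work into one
; vpshufb per 128-bit half.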
define <32 x i8> @shuffle_v32i8_uu_14_uu_uu_00_00_00_00_00_00_00_00_00_00_00_00_16_16_uu_16_uu_uu_uu_uu_16_16_16_16_16_16_30_16(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_uu_14_uu_uu_00_00_00_00_00_00_00_00_00_00_00_00_16_16_uu_16_uu_uu_uu_uu_16_16_16_16_16_16_30_16
; AVX1:       # BB#0:
; AVX1-NEXT:    vpshufb {{.*}} # xmm1 = xmm0[0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0]
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm0
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,u,0,u,u,u,u,0,0,0,0,0,0,14,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_uu_14_uu_uu_00_00_00_00_00_00_00_00_00_00_00_00_16_16_uu_16_uu_uu_uu_uu_16_16_16_16_16_16_30_16
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[u,14,u,u,0,0,0,0,0,0,0,0,0,0,0,0,16,16,u,16,u,u,u,u,16,16,16,16,16,16,30,16]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 undef, i32 14, i32 undef, i32 undef, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 16, i32 16, i32 undef, i32 16, i32 undef, i32 undef, i32 undef, i32 undef, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 30, i32 16>
ret <32 x i8> %shuffle
}

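; The four-byte-group broadcast from above, now with undef elements scattered
; through the mask; still one vpshufb per half on AVX1 and a single ymm vpshufb
; on AVX2.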
define <32 x i8> @shuffle_v32i8_00_00_00_uu_uu_uu_04_uu_08_08_08_08_uu_uu_12_uu_28_28_28_28_uu_uu_uu_24_20_20_20_20_16_16_16_16(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_00_00_00_uu_uu_uu_04_uu_08_08_08_08_uu_uu_12_uu_28_28_28_28_uu_uu_uu_24_20_20_20_20_16_16_16_16
; AVX1:       # BB#0:
; AVX1-NEXT:    vpshufb {{.*}} # xmm1 = xmm0[0,0,0,0,4,4,4,4,8,8,8,8,12,12,12,12]
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm0
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[12,12,12,12,12,12,13,13,4,4,4,4,0,0,0,0]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_00_00_00_uu_uu_uu_04_uu_08_08_08_08_uu_uu_12_uu_28_28_28_28_uu_uu_uu_24_20_20_20_20_16_16_16_16
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[0,0,0,u,u,u,4,u,8,8,8,8,u,u,12,u,28,28,28,28,u,u,u,24,20,20,20,20,16,16,16,16]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 0, i32 0, i32 0, i32 undef, i32 undef, i32 undef, i32 4, i32 undef, i32 8, i32 8, i32 8, i32 8, i32 undef, i32 undef, i32 12, i32 undef, i32 28, i32 28, i32 28, i32 28, i32 undef, i32 undef, i32 undef, i32 24, i32 20, i32 20, i32 20, i32 20, i32 16, i32 16, i32 16, i32 16>
ret <32 x i8> %shuffle
}

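; With the upper eight result bytes of the low half undef, AVX1 can splat byte 8
; with vpunpckhbw + vpshuflw instead of a vpshufb for that half; the high half
; still goes through vpshufb before the halves are recombined.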
define <32 x i8> @shuffle_v32i8_08_08_08_08_08_08_08_08_uu_uu_uu_uu_uu_uu_uu_uu_16_16_16_uu_uu_uu_uu_uu_uu_uu_24_24_24_24_24_24(<32 x i8> %a, <32 x i8> %b) {
; AVX1-LABEL: @shuffle_v32i8_08_08_08_08_08_08_08_08_uu_uu_uu_uu_uu_uu_uu_uu_16_16_16_uu_uu_uu_uu_uu_uu_uu_24_24_24_24_24_24
; AVX1:       # BB#0:
; AVX1-NEXT:    vpunpckhbw {{.*}} # xmm1 = xmm0[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
; AVX1-NEXT:    vpshuflw {{.*}} # xmm1 = xmm1[0,0,0,0,4,5,6,7]
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm0
; AVX1-NEXT:    vpshufb {{.*}} # xmm0 = xmm0[0,0,0,0,8,8,9,9,8,8,8,8,8,8,8,8]
; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: @shuffle_v32i8_08_08_08_08_08_08_08_08_uu_uu_uu_uu_uu_uu_uu_uu_16_16_16_uu_uu_uu_uu_uu_uu_uu_24_24_24_24_24_24
; AVX2:       # BB#0:
; AVX2-NEXT:    vpshufb {{.*}} # ymm0 = ymm0[8,8,8,8,8,8,8,8,u,u,u,u,u,u,u,u,16,16,16,u,u,u,u,u,u,u,24,24,24,24,24,24]
; AVX2-NEXT:    retq
%shuffle = shufflevector <32 x i8> %a, <32 x i8> %b, <32 x i32> <i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 16, i32 16, i32 16, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24>
ret <32 x i8> %shuffle
}