; RUN: llc -O0 -mcpu=pwr7 <%s | FileCheck %s

; Test optimizations of build_vector for 6-bit immediates.
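;
; The vspltisb/vspltish/vspltisw instructions splat a sign-extended 5-bit
; immediate, so a single splat covers only [-16, 15].  As the CHECK lines
; below show, a 6-bit even value v is instead built as splat(v/2) doubled,
; and a 6-bit odd value by combining a splat with a splat of -16
; (e.g. 27 = 11 - (-16)).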

target datalayout = "E-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-f128:128:128-v128:128:128-n32:64"
target triple = "powerpc64-unknown-linux-gnu"

%v4i32 = type <4 x i32>
%v8i16 = type <8 x i16>
%v16i8 = type <16 x i8>

define void @test_v4i32_pos_even(%v4i32* %P, %v4i32* %S) {
        %p = load %v4i32* %P
        %r = add %v4i32 %p, < i32 18, i32 18, i32 18, i32 18 >
        store %v4i32 %r, %v4i32* %S
        ret void
}

; CHECK-LABEL: test_v4i32_pos_even:
; CHECK: vspltisw [[REG1:[0-9]+]], 9
; CHECK: vadduwm {{[0-9]+}}, [[REG1]], [[REG1]]
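; 18 = 9 + 9: splat 9, then add the register to itself.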

define void @test_v4i32_neg_even(%v4i32* %P, %v4i32* %S) {
        %p = load %v4i32* %P
        %r = add %v4i32 %p, < i32 -28, i32 -28, i32 -28, i32 -28 >
        store %v4i32 %r, %v4i32* %S
        ret void
}

; CHECK-LABEL: test_v4i32_neg_even:
; CHECK: vspltisw [[REG1:[0-9]+]], -14
; CHECK: vadduwm {{[0-9]+}}, [[REG1]], [[REG1]]
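; -28 = -14 + -14: splat -14, then double it.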
33
Bill Schmidtabc40282013-02-20 20:41:42 +000034define void @test_v8i16_pos_even(%v8i16* %P, %v8i16* %S) {
Bill Schmidtb34c79e2013-02-20 15:50:31 +000035 %p = load %v8i16* %P
36 %r = add %v8i16 %p, < i16 30, i16 30, i16 30, i16 30, i16 30, i16 30, i16 30, i16 30 >
37 store %v8i16 %r, %v8i16* %S
38 ret void
39}
40
Stephen Linb4dc0232013-07-13 20:38:47 +000041; CHECK-LABEL: test_v8i16_pos_even:
Bill Schmidtb34c79e2013-02-20 15:50:31 +000042; CHECK: vspltish [[REG1:[0-9]+]], 15
43; CHECK: vadduhm {{[0-9]+}}, [[REG1]], [[REG1]]
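; 30 = 15 + 15: splat 15, then double it.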
44
Bill Schmidtabc40282013-02-20 20:41:42 +000045define void @test_v8i16_neg_even(%v8i16* %P, %v8i16* %S) {
Bill Schmidtb34c79e2013-02-20 15:50:31 +000046 %p = load %v8i16* %P
47 %r = add %v8i16 %p, < i16 -32, i16 -32, i16 -32, i16 -32, i16 -32, i16 -32, i16 -32, i16 -32 >
48 store %v8i16 %r, %v8i16* %S
49 ret void
50}
51
Stephen Linb4dc0232013-07-13 20:38:47 +000052; CHECK-LABEL: test_v8i16_neg_even:
Bill Schmidtb34c79e2013-02-20 15:50:31 +000053; CHECK: vspltish [[REG1:[0-9]+]], -16
54; CHECK: vadduhm {{[0-9]+}}, [[REG1]], [[REG1]]
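; -32 = -16 + -16: splat -16, then double it.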
55
Bill Schmidtabc40282013-02-20 20:41:42 +000056define void @test_v16i8_pos_even(%v16i8* %P, %v16i8* %S) {
Bill Schmidtb34c79e2013-02-20 15:50:31 +000057 %p = load %v16i8* %P
58 %r = add %v16i8 %p, < i8 16, i8 16, i8 16, i8 16, i8 16, i8 16, i8 16, i8 16, i8 16, i8 16, i8 16, i8 16, i8 16, i8 16, i8 16, i8 16 >
59 store %v16i8 %r, %v16i8* %S
60 ret void
61}
62
Stephen Linb4dc0232013-07-13 20:38:47 +000063; CHECK-LABEL: test_v16i8_pos_even:
Bill Schmidtb34c79e2013-02-20 15:50:31 +000064; CHECK: vspltisb [[REG1:[0-9]+]], 8
65; CHECK: vaddubm {{[0-9]+}}, [[REG1]], [[REG1]]
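; 16 = 8 + 8: splat 8, then double it.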
66
Bill Schmidtabc40282013-02-20 20:41:42 +000067define void @test_v16i8_neg_even(%v16i8* %P, %v16i8* %S) {
Bill Schmidtb34c79e2013-02-20 15:50:31 +000068 %p = load %v16i8* %P
69 %r = add %v16i8 %p, < i8 -18, i8 -18, i8 -18, i8 -18, i8 -18, i8 -18, i8 -18, i8 -18, i8 -18, i8 -18, i8 -18, i8 -18, i8 -18, i8 -18, i8 -18, i8 -18 >
70 store %v16i8 %r, %v16i8* %S
71 ret void
72}
73
Stephen Linb4dc0232013-07-13 20:38:47 +000074; CHECK-LABEL: test_v16i8_neg_even:
Bill Schmidtb34c79e2013-02-20 15:50:31 +000075; CHECK: vspltisb [[REG1:[0-9]+]], -9
76; CHECK: vaddubm {{[0-9]+}}, [[REG1]], [[REG1]]
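; -18 = -9 + -9: splat -9, then double it.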
77
Bill Schmidtabc40282013-02-20 20:41:42 +000078define void @test_v4i32_pos_odd(%v4i32* %P, %v4i32* %S) {
79 %p = load %v4i32* %P
80 %r = add %v4i32 %p, < i32 27, i32 27, i32 27, i32 27 >
81 store %v4i32 %r, %v4i32* %S
82 ret void
83}
84
Stephen Linb4dc0232013-07-13 20:38:47 +000085; CHECK-LABEL: test_v4i32_pos_odd:
Bill Schmidtabc40282013-02-20 20:41:42 +000086; CHECK: vspltisw [[REG2:[0-9]+]], -16
87; CHECK: vspltisw [[REG1:[0-9]+]], 11
88; CHECK: vsubuwm {{[0-9]+}}, [[REG1]], [[REG2]]
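; 27 = 11 - (-16): subtract a splat of -16 from a splat of 11.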

define void @test_v4i32_neg_odd(%v4i32* %P, %v4i32* %S) {
        %p = load %v4i32* %P
        %r = add %v4i32 %p, < i32 -27, i32 -27, i32 -27, i32 -27 >
        store %v4i32 %r, %v4i32* %S
        ret void
}

; CHECK-LABEL: test_v4i32_neg_odd:
; CHECK: vspltisw [[REG2:[0-9]+]], -16
; CHECK: vspltisw [[REG1:[0-9]+]], -11
; CHECK: vadduwm {{[0-9]+}}, [[REG1]], [[REG2]]
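; -27 = -11 + (-16): add a splat of -16 to a splat of -11.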

define void @test_v8i16_pos_odd(%v8i16* %P, %v8i16* %S) {
        %p = load %v8i16* %P
        %r = add %v8i16 %p, < i16 31, i16 31, i16 31, i16 31, i16 31, i16 31, i16 31, i16 31 >
        store %v8i16 %r, %v8i16* %S
        ret void
}

; CHECK-LABEL: test_v8i16_pos_odd:
; CHECK: vspltish [[REG2:[0-9]+]], -16
; CHECK: vspltish [[REG1:[0-9]+]], 15
; CHECK: vsubuhm {{[0-9]+}}, [[REG1]], [[REG2]]
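; 31 = 15 - (-16).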

define void @test_v8i16_neg_odd(%v8i16* %P, %v8i16* %S) {
        %p = load %v8i16* %P
        %r = add %v8i16 %p, < i16 -31, i16 -31, i16 -31, i16 -31, i16 -31, i16 -31, i16 -31, i16 -31 >
        store %v8i16 %r, %v8i16* %S
        ret void
}

; CHECK-LABEL: test_v8i16_neg_odd:
; CHECK: vspltish [[REG2:[0-9]+]], -16
; CHECK: vspltish [[REG1:[0-9]+]], -15
; CHECK: vadduhm {{[0-9]+}}, [[REG1]], [[REG2]]
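; -31 = -15 + (-16).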

define void @test_v16i8_pos_odd(%v16i8* %P, %v16i8* %S) {
        %p = load %v16i8* %P
        %r = add %v16i8 %p, < i8 17, i8 17, i8 17, i8 17, i8 17, i8 17, i8 17, i8 17, i8 17, i8 17, i8 17, i8 17, i8 17, i8 17, i8 17, i8 17 >
        store %v16i8 %r, %v16i8* %S
        ret void
}

; CHECK-LABEL: test_v16i8_pos_odd:
; CHECK: vspltisb [[REG2:[0-9]+]], -16
; CHECK: vspltisb [[REG1:[0-9]+]], 1
; CHECK: vsububm {{[0-9]+}}, [[REG1]], [[REG2]]
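; 17 = 1 - (-16).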

define void @test_v16i8_neg_odd(%v16i8* %P, %v16i8* %S) {
        %p = load %v16i8* %P
        %r = add %v16i8 %p, < i8 -17, i8 -17, i8 -17, i8 -17, i8 -17, i8 -17, i8 -17, i8 -17, i8 -17, i8 -17, i8 -17, i8 -17, i8 -17, i8 -17, i8 -17, i8 -17 >
        store %v16i8 %r, %v16i8* %S
        ret void
}

; CHECK-LABEL: test_v16i8_neg_odd:
; CHECK: vspltisb [[REG2:[0-9]+]], -16
; CHECK: vspltisb [[REG1:[0-9]+]], -1
; CHECK: vaddubm {{[0-9]+}}, [[REG1]], [[REG2]]
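; -17 = -1 + (-16).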