[X86] Add add/sub saturation undef tests.
llvm-svn: 351066
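
As a sketch of what these tests exercise (the function name below is purely
illustrative and not part of the test file): per the "(ssub_sat x, undef) -> 0"
note in the test comments, a saturating subtract with an undef operand may
legally be folded to zero, since undef can be chosen equal to the other
operand and a saturating x - x is exactly 0.

  declare i32 @llvm.ssub.sat.i32(i32, i32)

  ; ssub_sat with an undef operand may be folded to 0:
  ; the combiner is free to pick the undef RHS equal to %x,
  ; and the saturating subtract %x - %x never saturates.
  define i32 @ssub_sat_undef_sketch(i32 %x) {
    %r = call i32 @llvm.ssub.sat.i32(i32 %x, i32 undef)
    ret i32 %r
  }

The CHECK lines added below record the current (unfolded) codegen, so a later
DAG combine for this pattern would show up as an update to these tests.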
diff --git a/llvm/test/CodeGen/X86/combine-sub-ssat.ll b/llvm/test/CodeGen/X86/combine-sub-ssat.ll
index a17573d..423efd2b 100644
--- a/llvm/test/CodeGen/X86/combine-sub-ssat.ll
+++ b/llvm/test/CodeGen/X86/combine-sub-ssat.ll
@@ -11,13 +11,43 @@
declare i64 @llvm.ssub.sat.i64 (i64, i64)
declare <8 x i16> @llvm.ssub.sat.v8i16(<8 x i16>, <8 x i16>)

+; fold (ssub_sat x, undef) -> 0
+define i32 @combine_undef_i32(i32 %a0) {
+; CHECK-LABEL: combine_undef_i32:
+; CHECK: # %bb.0:
+; CHECK-NEXT: xorl %eax, %eax
+; CHECK-NEXT: movl %edi, %ecx
+; CHECK-NEXT: subl %eax, %ecx
+; CHECK-NEXT: setns %al
+; CHECK-NEXT: addl $2147483647, %eax # imm = 0x7FFFFFFF
+; CHECK-NEXT: subl %eax, %edi
+; CHECK-NEXT: cmovnol %edi, %eax
+; CHECK-NEXT: retq
+ %res = call i32 @llvm.ssub.sat.i32(i32 %a0, i32 undef)
+ ret i32 %res
+}
+
+define <8 x i16> @combine_undef_v8i16(<8 x i16> %a0) {
+; SSE-LABEL: combine_undef_v8i16:
+; SSE: # %bb.0:
+; SSE-NEXT: psubsw %xmm0, %xmm0
+; SSE-NEXT: retq
+;
+; AVX-LABEL: combine_undef_v8i16:
+; AVX: # %bb.0:
+; AVX-NEXT: vpsubsw %xmm0, %xmm0, %xmm0
+; AVX-NEXT: retq
+ %res = call <8 x i16> @llvm.ssub.sat.v8i16(<8 x i16> undef, <8 x i16> %a0)
+ ret <8 x i16> %res
+}
+
; fold (ssub_sat x, 0) -> x
define i32 @combine_zero_i32(i32 %a0) {
; CHECK-LABEL: combine_zero_i32:
; CHECK: # %bb.0:
; CHECK-NEXT: movl %edi, %eax
; CHECK-NEXT: retq
- %1 = call i32 @llvm.ssub.sat.i32(i32 %a0, i32 0);
+ %1 = call i32 @llvm.ssub.sat.i32(i32 %a0, i32 0)
ret i32 %1
}

@@ -25,6 +55,6 @@
; CHECK-LABEL: combine_zero_v8i16:
; CHECK: # %bb.0:
; CHECK-NEXT: retq
- %1 = call <8 x i16> @llvm.ssub.sat.v8i16(<8 x i16> %a0, <8 x i16> zeroinitializer);
+ %1 = call <8 x i16> @llvm.ssub.sat.v8i16(<8 x i16> %a0, <8 x i16> zeroinitializer)
ret <8 x i16> %1
}