Use an X86 target-specific node X86ISD::PINSRW instead of a malformed
INSERT_VECTOR_ELT to insert a 16-bit value into a 128-bit vector.
git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@27314 91177308-0d34-0410-b5e6-96231b3b80d8
diff --git a/lib/Target/X86/X86ISelLowering.h b/lib/Target/X86/X86ISelLowering.h
index 3f8ec84..9722d6f 100644
--- a/lib/Target/X86/X86ISelLowering.h
+++ b/lib/Target/X86/X86ISelLowering.h
@@ -155,8 +155,12 @@
ZEXT_S2VEC,
/// PEXTRW - Extract a 16-bit value from a vector and zero extend it to
- /// i32, corresponds to X86::PINSRW.
+ /// i32, corresponds to X86::PEXTRW.
PEXTRW,
+
+ /// PINSRW - Insert the lower 16-bits of a 32-bit value to a vector,
+ /// corresponds to X86::PINSRW.
+ PINSRW,
};
// X86 specific condition code. These correspond to X86_*_COND in