D3D11: Fix overlapping vertex shader signatures.

When drawing with un-normalized integer vertex attributes, we need to
perform dynamic conversion in the vertex shader (VS). However, each
such attribute can be either signed or unsigned, and our shader
signature code treated both as a match, giving rise to a warning in
the D3D11 debug runtime. It's unclear whether this could produce
incorrect results, but it certainly should not produce a warning.
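As a standalone illustration (not part of this patch; AttribType is a
simplified stand-in for ProgramD3D::VertexExecutable::HLSLAttribType),
a bool-valued signature collapses signed and unsigned GPU-converted
attributes into the same key, while an enum-valued signature keeps
them distinct:

    // Hypothetical, simplified sketch -- not ANGLE code.
    #include <cassert>
    #include <vector>

    enum class AttribType { FLOAT, UNSIGNED_INT, SIGNED_INT };

    int main()
    {
        // Old scheme: the signature only records "was this attribute
        // converted on the GPU?", so a GL_INT layout and a
        // GL_UNSIGNED_INT layout produce identical keys and can reuse
        // the wrong cached vertex executable.
        std::vector<bool> oldSigInt  = {true};
        std::vector<bool> oldSigUint = {true};
        assert(oldSigInt == oldSigUint);  // indistinguishable

        // New scheme: the signature records the HLSL component type,
        // so the two input layouts map to different executables.
        std::vector<AttribType> newSigInt  = {AttribType::SIGNED_INT};
        std::vector<AttribType> newSigUint = {AttribType::UNSIGNED_INT};
        assert(newSigInt != newSigUint);  // distinct signatures
        return 0;
    }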

BUG=angleproject:1329

Change-Id: I302d11b44e8a0ef981e89c181aefac5451a899b7
Reviewed-on: https://chromium-review.googlesource.com/329998
Commit-Queue: Jamie Madill <jmadill@chromium.org>
Reviewed-by: Corentin Wallez <cwallez@chromium.org>
Reviewed-by: Zhenyao Mo <zmo@chromium.org>
diff --git a/src/libANGLE/renderer/d3d/ProgramD3D.cpp b/src/libANGLE/renderer/d3d/ProgramD3D.cpp
index e8f7e59..7586a7e 100644
--- a/src/libANGLE/renderer/d3d/ProgramD3D.cpp
+++ b/src/libANGLE/renderer/d3d/ProgramD3D.cpp
@@ -509,24 +509,44 @@
 }
 
 // static
+ProgramD3D::VertexExecutable::HLSLAttribType ProgramD3D::VertexExecutable::GetAttribType(
+    GLenum type)
+{
+    switch (type)
+    {
+        case GL_INT:
+            return HLSLAttribType::SIGNED_INT;
+        case GL_UNSIGNED_INT:
+            return HLSLAttribType::UNSIGNED_INT;
+        case GL_SIGNED_NORMALIZED:
+        case GL_UNSIGNED_NORMALIZED:
+        case GL_FLOAT:
+            return HLSLAttribType::FLOAT;
+        default:
+            UNREACHABLE();
+            return HLSLAttribType::FLOAT;
+    }
+}
+
+// static
 void ProgramD3D::VertexExecutable::getSignature(RendererD3D *renderer,
                                                 const gl::InputLayout &inputLayout,
                                                 Signature *signatureOut)
 {
-    signatureOut->resize(inputLayout.size());
+    signatureOut->assign(inputLayout.size(), HLSLAttribType::FLOAT);
 
     for (size_t index = 0; index < inputLayout.size(); ++index)
     {
         gl::VertexFormatType vertexFormatType = inputLayout[index];
-        bool converted = false;
-        if (vertexFormatType != gl::VERTEX_FORMAT_INVALID)
-        {
-            VertexConversionType conversionType =
-                renderer->getVertexConversionType(vertexFormatType);
-            converted = ((conversionType & VERTEX_CONVERT_GPU) != 0);
-        }
+        if (vertexFormatType == gl::VERTEX_FORMAT_INVALID)
+            continue;
 
-        (*signatureOut)[index] = converted;
+        VertexConversionType conversionType = renderer->getVertexConversionType(vertexFormatType);
+        if ((conversionType & VERTEX_CONVERT_GPU) == 0)
+            continue;
+
+        GLenum componentType = renderer->getVertexComponentType(vertexFormatType);
+        (*signatureOut)[index] = GetAttribType(componentType);
     }
 }
 
@@ -535,9 +555,9 @@
     size_t limit = std::max(mSignature.size(), signature.size());
     for (size_t index = 0; index < limit; ++index)
     {
-        // treat undefined indexes as 'not converted'
-        bool a = index < signature.size() ? signature[index] : false;
-        bool b = index < mSignature.size() ? mSignature[index] : false;
+        // treat undefined indexes as FLOAT
+        auto a = index < signature.size() ? signature[index] : HLSLAttribType::FLOAT;
+        auto b = index < mSignature.size() ? mSignature[index] : HLSLAttribType::FLOAT;
         if (a != b)
             return false;
     }
diff --git a/src/libANGLE/renderer/d3d/ProgramD3D.h b/src/libANGLE/renderer/d3d/ProgramD3D.h
index 53ee9c8..b497f1d 100644
--- a/src/libANGLE/renderer/d3d/ProgramD3D.h
+++ b/src/libANGLE/renderer/d3d/ProgramD3D.h
@@ -254,7 +254,14 @@
     class VertexExecutable
     {
       public:
-        typedef std::vector<bool> Signature;
+        enum HLSLAttribType
+        {
+            FLOAT,
+            UNSIGNED_INT,
+            SIGNED_INT,
+        };
+
+        typedef std::vector<HLSLAttribType> Signature;
 
         VertexExecutable(const gl::InputLayout &inputLayout,
                          const Signature &signature,
@@ -271,6 +278,8 @@
         ShaderExecutableD3D *shaderExecutable() const { return mShaderExecutable; }
 
       private:
+        static HLSLAttribType GetAttribType(GLenum type);
+
         gl::InputLayout mInputs;
         Signature mSignature;
         ShaderExecutableD3D *mShaderExecutable;
diff --git a/src/tests/gl_tests/ProgramBinaryTest.cpp b/src/tests/gl_tests/ProgramBinaryTest.cpp
index 26f7809..011725f 100644
--- a/src/tests/gl_tests/ProgramBinaryTest.cpp
+++ b/src/tests/gl_tests/ProgramBinaryTest.cpp
@@ -124,6 +124,24 @@
     }
 }
 
+// Tests that switching between signed and unsigned un-normalized data doesn't trigger a bug
+// in the D3D11 back-end.
+TEST_P(ProgramBinaryTest, DynamicShadersSignatureBug)
+{
+    glUseProgram(mProgram);
+    glBindBuffer(GL_ARRAY_BUFFER, mBuffer);
+
+    GLint attribLocation = glGetAttribLocation(mProgram, "inputAttribute");
+    ASSERT_NE(-1, attribLocation);
+    glEnableVertexAttribArray(attribLocation);
+
+    glVertexAttribPointer(attribLocation, 2, GL_BYTE, GL_FALSE, 0, nullptr);
+    glDrawArrays(GL_POINTS, 0, 1);
+
+    glVertexAttribPointer(attribLocation, 2, GL_UNSIGNED_BYTE, GL_FALSE, 0, nullptr);
+    glDrawArrays(GL_POINTS, 0, 1);
+}
+
 // This tests the ability to successfully save and load a program binary.
 TEST_P(ProgramBinaryTest, SaveAndLoadBinary)
 {