Reland of "D3D11: Fix draw perf regression with input layouts."

f8dd7b10 fixed unnecessary shader compiles, but introduced a draw
call perf regression. Fix the regression robustly by storing only a
minimal set of input layout elements/vertex attribute info, and by
handling the case where the vector sizes differ between a cached
and a new vertex layout.

Reland with fix for comparison warning.
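
For illustration, a minimal standalone sketch (not ANGLE code; the
Signature alias and SignaturesMatch helper below are made-up names)
of the comparison semantics the matchesSignature() change adopts:
when two signature vectors differ in length, indexes missing from the
shorter vector are treated as 'not converted' (false).

    // Standalone sketch of the size-tolerant signature comparison.
    // Not part of the patch; names are illustrative only.
    #include <algorithm>
    #include <cassert>
    #include <cstddef>
    #include <vector>

    using Signature = std::vector<bool>;

    bool SignaturesMatch(const Signature &a, const Signature &b)
    {
        size_t limit = std::max(a.size(), b.size());
        for (size_t index = 0; index < limit; ++index)
        {
            // Indexes past the end of the shorter vector count as
            // 'not converted' (false), so a trailing run of false
            // entries does not break the match.
            bool convertedA = index < a.size() ? a[index] : false;
            bool convertedB = index < b.size() ? b[index] : false;
            if (convertedA != convertedB)
                return false;
        }
        return true;
    }

    int main()
    {
        // Extra trailing 'false' entries do not affect the result.
        assert(SignaturesMatch({true, false}, {true, false, false}));
        // A trailing 'true' entry does.
        assert(!SignaturesMatch({true}, {true, true}));
        return 0;
    }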

BUG=510151

Change-Id: I578ddbb5b5bb12e7c1a901f23c7b5fcbd64b5d23
Reviewed-on: https://chromium-review.googlesource.com/292460
Tested-by: Jamie Madill <jmadill@chromium.org>
Reviewed-by: Corentin Wallez <cwallez@chromium.org>
diff --git a/src/libANGLE/renderer/d3d/ProgramD3D.cpp b/src/libANGLE/renderer/d3d/ProgramD3D.cpp
index 10d1415..3532634 100644
--- a/src/libANGLE/renderer/d3d/ProgramD3D.cpp
+++ b/src/libANGLE/renderer/d3d/ProgramD3D.cpp
@@ -59,12 +59,9 @@
 
 gl::InputLayout GetDefaultInputLayoutFromShader(const gl::Shader *vertexShader)
 {
-    gl::InputLayout defaultLayout(gl::MAX_VERTEX_ATTRIBS, gl::VERTEX_FORMAT_INVALID);
-    const auto &shaderAttributes = vertexShader->getActiveAttributes();
-    size_t layoutIndex = 0;
-    for (size_t attribIndex = 0; attribIndex < shaderAttributes.size(); ++attribIndex)
+    gl::InputLayout defaultLayout;
+    for (const sh::Attribute &shaderAttr : vertexShader->getActiveAttributes())
     {
-        const sh::Attribute &shaderAttr = shaderAttributes[attribIndex];
         if (shaderAttr.type != GL_NONE)
         {
             GLenum transposedType = gl::TransposeMatrixType(shaderAttr.type);
@@ -79,7 +76,7 @@
                 gl::VertexFormatType defaultType = gl::GetVertexFormatType(
                     componentType, GL_FALSE, components, pureInt);
 
-                defaultLayout[layoutIndex++] = defaultType;
+                defaultLayout.push_back(defaultType);
             }
         }
     }
@@ -150,23 +147,36 @@
                                                 const gl::InputLayout &inputLayout,
                                                 Signature *signatureOut)
 {
-    signatureOut->assign(inputLayout.size(), false);
+    signatureOut->resize(inputLayout.size());
 
     for (size_t index = 0; index < inputLayout.size(); ++index)
     {
         gl::VertexFormatType vertexFormatType = inputLayout[index];
+        bool converted = false;
         if (vertexFormatType != gl::VERTEX_FORMAT_INVALID)
         {
             VertexConversionType conversionType =
                 renderer->getVertexConversionType(vertexFormatType);
-            (*signatureOut)[index] = ((conversionType & VERTEX_CONVERT_GPU) != 0);
+            converted = ((conversionType & VERTEX_CONVERT_GPU) != 0);
         }
+
+        (*signatureOut)[index] = converted;
     }
 }
 
 bool ProgramD3D::VertexExecutable::matchesSignature(const Signature &signature) const
 {
-    return mSignature == signature;
+    size_t limit = std::max(mSignature.size(), signature.size());
+    for (size_t index = 0; index < limit; ++index)
+    {
+        // treat undefined indexes as 'not converted'
+        bool a = index < signature.size() ? signature[index] : false;
+        bool b = index < mSignature.size() ? mSignature[index] : false;
+        if (a != b)
+            return false;
+    }
+
+    return true;
 }
 
 ProgramD3D::PixelExecutable::PixelExecutable(const std::vector<GLenum> &outputSignature,
@@ -2060,7 +2070,7 @@
 
 void ProgramD3D::updateCachedInputLayout(const gl::Program *program, const gl::State &state)
 {
-    mCachedInputLayout.assign(gl::MAX_VERTEX_ATTRIBS, gl::VERTEX_FORMAT_INVALID);
+    mCachedInputLayout.clear();
     const int *semanticIndexes = program->getSemanticIndexes();
 
     const auto &vertexAttributes = state.getVertexArray()->getVertexAttributes();
@@ -2071,6 +2081,10 @@
 
         if (semanticIndex != -1)
         {
+            if (mCachedInputLayout.size() < static_cast<size_t>(semanticIndex + 1))
+            {
+                mCachedInputLayout.resize(semanticIndex + 1, gl::VERTEX_FORMAT_INVALID);
+            }
             mCachedInputLayout[semanticIndex] =
                 GetVertexFormatType(vertexAttributes[attributeIndex],
                                     state.getVertexAttribCurrentValue(attributeIndex).Type);