Fix linking of struct varyings.
The dEQP varying struct linkage tests were triggering
asserts when we tried to query component type info for struct types.
Those queries are only defined for basic types, so struct varyings now
emit their decorated struct type name instead.
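For illustration only (not part of the patch; the types and names below
are simplified stand-ins for the DynamicHLSL helpers), the new branch
picks the HLSL field type like this:

    // Standalone sketch of the fix: struct varyings bypass the
    // component-type query (which asserted on structs) and use the
    // decorated struct name as the HLSL field type.
    #include <iostream>
    #include <string>

    struct Varying
    {
        bool isStruct;
        std::string structName;     // e.g. "S" for "struct S { vec4 a; }"
        std::string componentType;  // stands in for HLSLComponentTypeString(...)
    };

    static std::string FieldTypeString(const Varying &v)
    {
        // Mirrors the new branch: decorated struct name vs. component type string.
        return v.isStruct ? "_" + v.structName : v.componentType;
    }

    int main()
    {
        Varying s = {true, "S", ""};
        Varying f = {false, "", "float4"};
        std::cout << FieldTypeString(s) << " v0 : TEXCOORD0;\n";  // "_S v0 : TEXCOORD0;"
        std::cout << FieldTypeString(f) << " v1 : TEXCOORD1;\n";  // "float4 v1 : TEXCOORD1;"
        return 0;
    }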
BUG=angle:580
Change-Id: I61e33573c5577a327c58fec7a07ae7718690ac42
Reviewed-on: https://chromium-review.googlesource.com/189194
Reviewed-by: Shannon Woods <shannonwoods@chromium.org>
Reviewed-by: Geoff Lang <geofflang@chromium.org>
Tested-by: Jamie Madill <jmadill@chromium.org>
diff --git a/src/libGLESv2/DynamicHLSL.cpp b/src/libGLESv2/DynamicHLSL.cpp
index 5477185..585c015 100644
--- a/src/libGLESv2/DynamicHLSL.cpp
+++ b/src/libGLESv2/DynamicHLSL.cpp
@@ -270,11 +270,20 @@
std::string n = Str(varying->registerIndex + elementIndex * variableRows + row);
- // matrices within structs are not transposed, hence we do not use the special struct prefix "rm"
- GLenum componentType = gl::UniformComponentType(transposedType);
- int columnCount = gl::VariableColumnCount(transposedType);
- std::string componentTypeString = gl_d3d::HLSLComponentTypeString(componentType, columnCount);
- std::string typeString = (varying->isStruct() ? "_" + varying->structName : componentTypeString);
+ std::string typeString;
+
+ if (varying->isStruct())
+ {
+ // matrices within structs are not transposed, so
+ // do not use the special struct prefix "rm"
+ typeString = decorateVariable(varying->structName);
+ }
+ else
+ {
+ GLenum componentType = gl::UniformComponentType(transposedType);
+ int columnCount = gl::VariableColumnCount(transposedType);
+ typeString = gl_d3d::HLSLComponentTypeString(componentType, columnCount);
+ }
varyingHLSL += typeString + " v" + n + " : " + varyingSemantic + n + ";\n";
}
}
@@ -313,11 +322,11 @@
structHLSL += " " + gl_d3d::HLSLComponentTypeString(componentType, UniformComponentCount(shaderAttribute.type));
}
- structHLSL += " " + decorateAttribute(shaderAttribute.name) + " : TEXCOORD" + Str(semanticIndex) + ";\n";
+ structHLSL += " " + decorateVariable(shaderAttribute.name) + " : TEXCOORD" + Str(semanticIndex) + ";\n";
semanticIndex += AttributeRegisterCount(shaderAttribute.type);
// HLSL code for initialization
- initHLSL += " " + decorateAttribute(shaderAttribute.name) + " = ";
+ initHLSL += " " + decorateVariable(shaderAttribute.name) + " = ";
// Mismatched vertex attribute to vertex input may result in an undefined
// data reinterpretation (eg for pure integer->float, float->pure integer)
@@ -329,7 +338,7 @@
}
else
{
- initHLSL += "input." + decorateAttribute(shaderAttribute.name);
+ initHLSL += "input." + decorateVariable(shaderAttribute.name);
}
initHLSL += ";\n";
@@ -926,7 +935,7 @@
}
// This method needs to match OutputHLSL::decorate
-std::string DynamicHLSL::decorateAttribute(const std::string &name)
+std::string DynamicHLSL::decorateVariable(const std::string &name)
{
if (name.compare(0, 3, "gl_") != 0 && name.compare(0, 3, "dx_") != 0)
{
@@ -938,7 +947,7 @@
std::string DynamicHLSL::generateAttributeConversionHLSL(const VertexFormat &vertexFormat, const sh::ShaderVariable &shaderAttrib) const
{
- std::string attribString = "input." + decorateAttribute(shaderAttrib.name);
+ std::string attribString = "input." + decorateVariable(shaderAttrib.name);
// Matrix
if (IsMatrixType(shaderAttrib.type))