Allow '\' characters in shader source for WebGL2.

ESSL 3.00 supports splicing lines with the backslash character
(preprocessor line continuation), so '\' must be accepted in shader
source when the context is WebGL2. It remains invalid for WebGL1,
whose ESSL 1.00 has no line continuation.
TEST=deqp/data/gles3/shaders/preprocessor.html
BUG=angleproject:2016
Change-Id: Ia5240a1ff65ebadc15604a5c3eb63042953c43a5
Reviewed-on: https://chromium-review.googlesource.com/540198
Commit-Queue: Geoff Lang <geofflang@chromium.org>
Reviewed-by: Corentin Wallez <cwallez@chromium.org>
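
For reference, a minimal sketch of the version-dependent character check this
change implies. The helper name and structure are illustrative assumptions,
not ANGLE's actual validation code:

// Illustrative sketch only -- not ANGLE's real implementation.
// Validates one shader-source character under WebGL rules.
bool IsValidWebGLShaderCharacter(char c, int clientMajorVersion)  // hypothetical helper
{
    switch (c)
    {
        // Excluded from shader source in both WebGL1 and WebGL2.
        case '"':
        case '$':
        case '`':
        case '@':
        case '\'':
            return false;
        case '\\':
            // ESSL 1.00 (WebGL1) has no line continuation, so '\' is
            // invalid; ESSL 3.00 (WebGL2) uses '\' to splice lines.
            return clientMajorVersion >= 3;
        default:
            // Otherwise allow tab, newline, carriage return, and
            // printable ASCII.
            return c == '\t' || c == '\n' || c == '\r' ||
                   (c >= ' ' && c <= '~');
    }
}

The test change below mirrors this split: the always-invalid characters stay
in invalidSet, and '\' is appended only for client versions below 3, which is
why the fixed-size array becomes a std::vector.
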
diff --git a/src/tests/gl_tests/WebGLCompatibilityTest.cpp b/src/tests/gl_tests/WebGLCompatibilityTest.cpp
index 9f4b423..4176ff5 100644
--- a/src/tests/gl_tests/WebGLCompatibilityTest.cpp
+++ b/src/tests/gl_tests/WebGLCompatibilityTest.cpp
@@ -873,7 +873,11 @@
"abcdefghijklmnopqrstuvwxyz_ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890";
const std::string validUniformName =
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_1234567890";
-    const char invalidSet[] = {'"', '$', '`', '@', '\\', '\''};
+    std::vector<char> invalidSet = {'"', '$', '`', '@', '\''};
+    if (getClientMajorVersion() < 3)
+    {
+        invalidSet.push_back('\\');
+    }
     std::string vert = "attribute float ";
     vert += validAttribName;
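
As a usage illustration (an assumed example, not part of this change), a
WebGL2 shader can now rely on '\' for preprocessor line continuation:

// Assumed ESSL 3.00 source; the '\' before the newline splices the
// #define onto the next line, which WebGL2 validation must permit.
const char kVertexShader[] =
    "#version 300 es\n"
    "#define HALF 0.5 + \\\n"
    "    0.0\n"
    "void main() { gl_Position = vec4(HALF); }\n";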