Key the format tables on internal format and type.

Keying the format tables on internal format alone is not enough to fully
validate the unsized formats, which require additional type information.

This CL has no functional changes; it just splits the tables and updates
the calls to GetInternalFormatInfo to provide type information when the
format is not sized.
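
For reference (not part of this CL), a minimal sketch of how callers choose
between the two lookups after the split, assuming the formatutils.h
declarations GetInternalFormatInfo(GLenum, GLenum) and
GetSizedInternalFormatInfo(GLenum); the LookupUnsized/LookupSized helper
names are illustrative only:

    #include "libANGLE/formatutils.h"

    // Unsized formats (e.g. GL_RGBA) resolve to different effective sized
    // formats depending on the type (GL_UNSIGNED_BYTE vs GL_FLOAT), so the
    // type must be part of the table key.
    const gl::InternalFormat &LookupUnsized(GLenum internalFormat, GLenum type)
    {
        return gl::GetInternalFormatInfo(internalFormat, type);
    }

    // Sized formats (e.g. GL_RGBA8) identify a single table entry on their own.
    const gl::InternalFormat &LookupSized(GLenum sizedInternalFormat)
    {
        return gl::GetSizedInternalFormatInfo(sizedInternalFormat);
    }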

BUG=angleproject:1523
BUG=angleproject:1958
BUG=angleproject:1228

Change-Id: I37e5201e7f54fa8eca01b8a6e64b11a6b94484e7
Reviewed-on: https://chromium-review.googlesource.com/468449
Commit-Queue: Geoff Lang <geofflang@chromium.org>
Reviewed-by: Jamie Madill <jmadill@chromium.org>
diff --git a/src/libANGLE/validationES3.cpp b/src/libANGLE/validationES3.cpp
index 249de5a..207b44c 100644
--- a/src/libANGLE/validationES3.cpp
+++ b/src/libANGLE/validationES3.cpp
@@ -25,20 +25,11 @@
 {
 
 static bool ValidateTexImageFormatCombination(gl::Context *context,
+                                              GLenum target,
                                               GLenum internalFormat,
                                               GLenum format,
                                               GLenum type)
 {
-    // For historical reasons, glTexImage2D and glTexImage3D pass in their internal format as a
-    // GLint instead of a GLenum. Therefor an invalid internal format gives a GL_INVALID_VALUE
-    // error instead of a GL_INVALID_ENUM error. As this validation function is only called in
-    // the validation codepaths for glTexImage2D/3D, we record a GL_INVALID_VALUE error.
-    const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(internalFormat);
-    if (!formatInfo.textureSupport(context->getClientVersion(), context->getExtensions()))
-    {
-        context->handleError(Error(GL_INVALID_VALUE));
-        return false;
-    }
 
     // The type and format are valid if any supported internal format has that type and format
     if (!ValidES3Format(format) || !ValidES3Type(type))
@@ -47,6 +38,30 @@
         return false;
     }
 
+    // From the ES 3.0 spec section 3.8.3:
+    // Textures with a base internal format of DEPTH_COMPONENT or DEPTH_STENCIL are supported by
+    // texture image specification commands only if target is TEXTURE_2D, TEXTURE_2D_ARRAY, or
+    // TEXTURE_CUBE_MAP.Using these formats in conjunction with any other target will result in an
+    // INVALID_OPERATION error.
+    if (target == GL_TEXTURE_3D && (format == GL_DEPTH_COMPONENT || format == GL_DEPTH_STENCIL))
+    {
+        context->handleError(Error(
+            GL_INVALID_OPERATION,
+            "Format cannot be GL_DEPTH_COMPONENT or GL_DEPTH_STENCIL if target is GL_TEXTURE_3D"));
+        return false;
+    }
+
+    // For historical reasons, glTexImage2D and glTexImage3D pass in their internal format as a
+    // GLint instead of a GLenum. Therefore an invalid internal format gives a GL_INVALID_VALUE
+    // error instead of a GL_INVALID_ENUM error. As this validation function is only called in
+    // the validation codepaths for glTexImage2D/3D, we record a GL_INVALID_VALUE error.
+    const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(internalFormat, type);
+    if (!formatInfo.textureSupport(context->getClientVersion(), context->getExtensions()))
+    {
+        context->handleError(Error(GL_INVALID_VALUE));
+        return false;
+    }
+
     // Check if this is a valid format combination to load texture data
     if (!ValidES3FormatCombination(format, type, internalFormat))
     {
@@ -171,14 +186,15 @@
 
     // Validate texture formats
     GLenum actualInternalFormat =
-        isSubImage ? texture->getFormat(target, level).asSized() : internalformat;
+        isSubImage ? texture->getFormat(target, level).info->sizedInternalFormat : internalformat;
     if (isSubImage && actualInternalFormat == GL_NONE)
     {
         context->handleError(Error(GL_INVALID_OPERATION, "Texture level does not exist."));
         return false;
     }
 
-    const gl::InternalFormat &actualFormatInfo = gl::GetInternalFormatInfo(actualInternalFormat);
+    const gl::InternalFormat &actualFormatInfo =
+        gl::GetSizedInternalFormatInfo(actualInternalFormat);
     if (isCompressed)
     {
         if (!actualFormatInfo.compressed)
@@ -188,8 +204,8 @@
             return false;
         }
 
-        if (!ValidCompressedImageSize(context, actualInternalFormat, xoffset, yoffset, width,
-                                      height))
+        if (!ValidCompressedImageSize(context, actualFormatInfo.internalFormat, xoffset, yoffset,
+                                      width, height))
         {
             context->handleError(Error(GL_INVALID_OPERATION));
             return false;
@@ -209,16 +225,10 @@
     }
     else
     {
-        if (!ValidateTexImageFormatCombination(context, actualInternalFormat, format, type))
+        if (!ValidateTexImageFormatCombination(context, target, actualInternalFormat, format, type))
         {
             return false;
         }
-
-        if (target == GL_TEXTURE_3D && (format == GL_DEPTH_COMPONENT || format == GL_DEPTH_STENCIL))
-        {
-            context->handleError(Error(GL_INVALID_OPERATION));
-            return false;
-        }
     }
 
     // Validate sub image parameters
@@ -452,7 +462,7 @@
                                        const InternalFormat &destFormat,
                                        GLenum *outEffectiveFormat)
 {
-    if (destFormat.pixelBytes > 0)
+    if (destFormat.sized)
     {
         return GetSizedEffectiveInternalFormatInfo(srcFormat, outEffectiveFormat);
     }
@@ -467,13 +477,10 @@
     return first == 0 || first == second;
 }
 
-static bool IsValidES3CopyTexImageCombination(const Format &textureFormat,
-                                              const Format &framebufferFormat,
+static bool IsValidES3CopyTexImageCombination(const InternalFormat &textureFormatInfo,
+                                              const InternalFormat &framebufferFormatInfo,
                                               GLuint readBufferHandle)
 {
-    const auto &textureFormatInfo     = *textureFormat.info;
-    const auto &framebufferFormatInfo = *framebufferFormat.info;
-
     if (!ValidES3CopyConversion(textureFormatInfo.format, framebufferFormatInfo.format))
     {
         return false;
@@ -527,7 +534,7 @@
     {
         // Not the default framebuffer, therefore the read buffer must be a user-created texture or
         // renderbuffer
-        if (framebufferFormat.sized)
+        if (framebufferFormatInfo.sized)
         {
             sourceEffectiveFormat = &framebufferFormatInfo;
         }
@@ -536,9 +543,8 @@
             // Renderbuffers cannot be created with an unsized internal format, so this must be an
             // unsized-format texture. We can use the same table we use when creating textures to
             // get its effective sized format.
-            GLenum sizedInternalFormat =
-                GetSizedInternalFormat(framebufferFormatInfo.format, framebufferFormatInfo.type);
-            sourceEffectiveFormat = &GetInternalFormatInfo(sizedInternalFormat);
+            sourceEffectiveFormat =
+                &GetSizedInternalFormatInfo(framebufferFormatInfo.sizedInternalFormat);
         }
     }
     else
@@ -551,7 +557,7 @@
             if (GetEffectiveInternalFormat(framebufferFormatInfo, textureFormatInfo,
                                            &effectiveFormat))
             {
-                sourceEffectiveFormat = &GetInternalFormatInfo(effectiveFormat);
+                sourceEffectiveFormat = &GetSizedInternalFormatInfo(effectiveFormat);
             }
             else
             {
@@ -561,13 +567,13 @@
         else if (framebufferFormatInfo.colorEncoding == GL_SRGB)
         {
             // SRGB buffers can only be copied to sized format destinations according to table 3.18
-            if (textureFormat.sized &&
+            if (textureFormatInfo.sized &&
                 (framebufferFormatInfo.redBits >= 1 && framebufferFormatInfo.redBits <= 8) &&
                 (framebufferFormatInfo.greenBits >= 1 && framebufferFormatInfo.greenBits <= 8) &&
                 (framebufferFormatInfo.blueBits >= 1 && framebufferFormatInfo.blueBits <= 8) &&
                 (framebufferFormatInfo.alphaBits >= 1 && framebufferFormatInfo.alphaBits <= 8))
             {
-                sourceEffectiveFormat = &GetInternalFormatInfo(GL_SRGB8_ALPHA8);
+                sourceEffectiveFormat = &GetSizedInternalFormatInfo(GL_SRGB8_ALPHA8);
             }
             else
             {
@@ -581,7 +587,7 @@
         }
     }
 
-    if (textureFormat.sized)
+    if (textureFormatInfo.sized)
     {
         // Section 3.8.5 of the GLES 3.0.3 spec, pg 139, requires that, if the destination format is
         // sized, component sizes of the source and destination formats must exactly match if the
@@ -642,7 +648,7 @@
 
     if (isSubImage)
     {
-        if (!IsValidES3CopyTexImageCombination(textureFormat, source->getFormat(),
+        if (!IsValidES3CopyTexImageCombination(*textureFormat.info, *source->getFormat().info,
                                                readFramebufferID))
         {
             context->handleError(Error(GL_INVALID_OPERATION));
@@ -652,8 +658,8 @@
     else
     {
         // Use format/type from the source FBO. (Might not be perfect for all cases?)
-        const auto framebufferFormat = source->getFormat();
-        Format copyFormat(internalformat, framebufferFormat.format, framebufferFormat.type);
+        const InternalFormat &framebufferFormat = *source->getFormat().info;
+        const InternalFormat &copyFormat = GetInternalFormatInfo(internalformat, GL_UNSIGNED_BYTE);
         if (!IsValidES3CopyTexImageCombination(copyFormat, framebufferFormat, readFramebufferID))
         {
             context->handleError(Error(GL_INVALID_OPERATION));
@@ -814,14 +820,14 @@
         return false;
     }
 
-    const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(internalformat);
+    const gl::InternalFormat &formatInfo = gl::GetSizedInternalFormatInfo(internalformat);
     if (!formatInfo.textureSupport(context->getClientVersion(), context->getExtensions()))
     {
         context->handleError(Error(GL_INVALID_ENUM));
         return false;
     }
 
-    if (formatInfo.pixelBytes == 0)
+    if (!formatInfo.sized)
     {
         context->handleError(Error(GL_INVALID_ENUM));
         return false;
@@ -1168,7 +1174,7 @@
         return false;
     }
 
-    const InternalFormat &formatInfo = GetInternalFormatInfo(internalformat);
+    const InternalFormat &formatInfo = GetSizedInternalFormatInfo(internalformat);
     if (!formatInfo.compressed)
     {
         context->handleError(Error(GL_INVALID_ENUM, "Not a valid compressed texture format"));
@@ -1798,7 +1804,7 @@
         return false;
     }
 
-    const InternalFormat &formatInfo = GetInternalFormatInfo(format);
+    const InternalFormat &formatInfo = GetSizedInternalFormatInfo(format);
     if (!formatInfo.compressed)
     {
         context->handleError(Error(GL_INVALID_ENUM, "Not a valid compressed texture format"));
@@ -2386,7 +2392,7 @@
 
     // The ES3 spec(section 4.4.2) states that the internal format must be sized and not an integer
     // format if samples is greater than zero.
-    const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(internalformat);
+    const gl::InternalFormat &formatInfo = gl::GetSizedInternalFormatInfo(internalformat);
     if ((formatInfo.componentType == GL_UNSIGNED_INT || formatInfo.componentType == GL_INT) &&
         samples > 0)
     {