Revert of Enable sRGB on iOS, make sRGB decode support optional (patchset #11 id:200001 of https://codereview.chromium.org/2539993002/ )

Reason for revert:
ANGLE tests are failing

Original issue's description:
> Two (related) changes here:
>
> 1) Our older iOS devices failed our sRGB tests, due to precision issues
> with alpha. At this point, we only test on iPadMini 4, and that appears
> not to have any problems.
>
> 2) iOS devices still don't have the sRGB texture decode extension. But,
> some clients have no interest in mixing legacy/color-correct rendering,
> and would like to use sRGB on these devices. This GrContextOptions flag
> enables sRGB support in those cases.
>
> Adjust the test code to produce sRGB capable contexts on these devices,
> but only for configs that have a color space. (See comment).
>
> BUG=skia:4148
>
> Committed: https://skia.googlesource.com/skia/+/9db12d2341f3f8722c8b90b11dd4cce138a8a64e

TBR=bsalomon@google.com
# Skipping CQ checks because the original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=skia:4148

Review-Url: https://codereview.chromium.org/2547603002
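
For reference, the GrContextOptions flag removed by this revert was meant to be set by
clients roughly as sketched below. This is only an illustration of the opt-out path: the
helper name and the assumption that the caller already owns a GL backend context (passed
as a GrBackendContext) are hypothetical; GrContext::Create() is used with the same
signature that appears in tools/gpu/GrContextFactory.cpp in the diff below.

    #include "GrContext.h"
    #include "GrContextOptions.h"

    // Hypothetical helper: create a color-correct-only context on a device that
    // lacks GL_EXT_texture_sRGB_decode (e.g. current iOS hardware).
    GrContext* MakeColorCorrectOnlyContext(GrBackendContext glBackendContext) {
        GrContextOptions options;
        // Opt out of the decode-disable requirement. This is only safe when the
        // client never mixes "legacy" (non-color-correct) rendering with sRGB
        // render targets, since sRGB decode could not be turned off per-draw.
        options.fRequireDecodeDisableForSRGB = false;
        return GrContext::Create(kOpenGL_GrBackend, glBackendContext, options);
    }

In the test tools, the kRequireSRGBDecodeDisableSupport ContextOptions bit (also removed
below) was what wired this flag into GrContextFactory for legacy-mode configs.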
diff --git a/include/gpu/GrContextOptions.h b/include/gpu/GrContextOptions.h
index 65daf72..6a1b51b 100644
--- a/include/gpu/GrContextOptions.h
+++ b/include/gpu/GrContextOptions.h
@@ -77,14 +77,6 @@
      * purposes.
      */
     bool fForceSWPathMasks = false;
-
-    /**
-     * If true, sRGB support will not be enabled unless sRGB decoding can be disabled (via an
-     * extension). If mixed use of "legacy" mode and sRGB/color-correct mode is not required, this
-     * can be set to false, which will significantly expand the number of devices that qualify for
-     * sRGB support.
-     */
-    bool fRequireDecodeDisableForSRGB = true;
 };
 
 #endif
diff --git a/src/gpu/gl/GrGLCaps.cpp b/src/gpu/gl/GrGLCaps.cpp
index 55647a8..ec0f628 100644
--- a/src/gpu/gl/GrGLCaps.cpp
+++ b/src/gpu/gl/GrGLCaps.cpp
@@ -51,7 +51,6 @@
     fMipMapLevelAndLodControlSupport = false;
     fRGBAToBGRAReadbackConversionsAreSlow = false;
     fDoManualMipmapping = false;
-    fSRGBDecodeDisableSupport = false;
 
     fBlitFramebufferFlags = kNoSupport_BlitFramebufferFlag;
 
@@ -608,11 +607,9 @@
         fDoManualMipmapping = true;
     }
 
-    fSRGBDecodeDisableSupport = ctxInfo.hasExtension("GL_EXT_texture_sRGB_decode");
-
     // Requires fTextureRedSupport, fTextureSwizzleSupport, msaa support, ES compatibility have
     // already been detected.
-    this->initConfigTable(contextOptions, ctxInfo, gli, shaderCaps);
+    this->initConfigTable(ctxInfo, gli, shaderCaps);
 
     this->applyOptionsOverrides(contextOptions);
     shaderCaps->applyOptionsOverrides(contextOptions);
@@ -1392,8 +1389,7 @@
     return true;
 }
 
-void GrGLCaps::initConfigTable(const GrContextOptions& contextOptions,
-                               const GrGLContextInfo& ctxInfo, const GrGLInterface* gli,
+void GrGLCaps::initConfigTable(const GrGLContextInfo& ctxInfo, const GrGLInterface* gli,
                                GrShaderCaps* shaderCaps) {
     /*
         Comments on renderability of configs on various GL versions.
@@ -1579,23 +1575,17 @@
             fSRGBWriteControl = true;
         }
     } else {
-        fSRGBSupport = ctxInfo.version() >= GR_GL_VER(3,0) || ctxInfo.hasExtension("GL_EXT_sRGB");
-#if defined(SK_CPU_X86)
-        if (kPowerVRRogue_GrGLRenderer == ctxInfo.renderer()) {
-            // NexusPlayer has strange bugs with sRGB (skbug.com/4148). This is a targeted fix to
-            // blacklist that device (and any others that might be sharing the same driver).
-            fSRGBSupport = false;
-        }
-#endif
+        // See https://bug.skia.org/4148 for PowerVR issue.
+        fSRGBSupport = kPowerVRRogue_GrGLRenderer != ctxInfo.renderer() &&
+            (ctxInfo.version() >= GR_GL_VER(3,0) || ctxInfo.hasExtension("GL_EXT_sRGB"));
         // ES through 3.1 requires EXT_srgb_write_control to support toggling
         // sRGB writing for destinations.
         // See https://bug.skia.org/5329 for Adreno4xx issue.
         fSRGBWriteControl = kAdreno4xx_GrGLRenderer != ctxInfo.renderer() &&
             ctxInfo.hasExtension("GL_EXT_sRGB_write_control");
     }
-    if (contextOptions.fRequireDecodeDisableForSRGB && !fSRGBDecodeDisableSupport) {
-        // To support "legacy" L32 mode, we require the ability to turn off sRGB decode. Clients
-        // can opt-out of that requirement, if they intend to always do linear blending.
+    if (!ctxInfo.hasExtension("GL_EXT_texture_sRGB_decode")) {
+        // To support "legacy" L32 mode, we require the ability to turn off sRGB decode:
         fSRGBSupport = false;
     }
     fConfigTable[kSRGBA_8888_GrPixelConfig].fFormats.fBaseInternalFormat = GR_GL_SRGB_ALPHA;
diff --git a/src/gpu/gl/GrGLCaps.h b/src/gpu/gl/GrGLCaps.h
index deb0889..5c7e028 100644
--- a/src/gpu/gl/GrGLCaps.h
+++ b/src/gpu/gl/GrGLCaps.h
@@ -345,8 +345,6 @@
 
     bool doManualMipmapping() const { return fDoManualMipmapping; }
 
-    bool srgbDecodeDisableSupport() const { return fSRGBDecodeDisableSupport; }
-
     /**
      * Returns a string containing the caps info.
      */
@@ -380,8 +378,7 @@
     void initBlendEqationSupport(const GrGLContextInfo&);
     void initStencilFormats(const GrGLContextInfo&);
     // This must be called after initFSAASupport().
-    void initConfigTable(const GrContextOptions&, const GrGLContextInfo&, const GrGLInterface*,
-                         GrShaderCaps*);
+    void initConfigTable(const GrGLContextInfo&, const GrGLInterface*, GrShaderCaps*);
 
     void initShaderPrecisionTable(const GrGLContextInfo&, const GrGLInterface*, GrShaderCaps*);
 
@@ -423,7 +420,6 @@
     bool fMipMapLevelAndLodControlSupport : 1;
     bool fRGBAToBGRAReadbackConversionsAreSlow : 1;
     bool fDoManualMipmapping : 1;
-    bool fSRGBDecodeDisableSupport : 1;
 
     uint32_t fBlitFramebufferFlags;
 
diff --git a/src/gpu/gl/GrGLGpu.cpp b/src/gpu/gl/GrGLGpu.cpp
index 5da585f..4820ca8 100644
--- a/src/gpu/gl/GrGLGpu.cpp
+++ b/src/gpu/gl/GrGLGpu.cpp
@@ -3243,7 +3243,7 @@
     newTexParams.fMinFilter = glMinFilterModes[filterMode];
     newTexParams.fMagFilter = glMagFilterModes[filterMode];
 
-    if (this->glCaps().srgbDecodeDisableSupport() && GrPixelConfigIsSRGB(texture->config())) {
+    if (GrPixelConfigIsSRGB(texture->config())) {
         newTexParams.fSRGBDecode = allowSRGBInputs ? GR_GL_DECODE_EXT : GR_GL_SKIP_DECODE_EXT;
         if (setAll || newTexParams.fSRGBDecode != oldTexParams.fSRGBDecode) {
             this->setTextureUnit(unitIdx);
@@ -3419,7 +3419,7 @@
 
     // Configure sRGB decode, if necessary. This state is the only thing needed for the driver
     // call (glGenerateMipmap) to work correctly. Our manual method dirties other state, too.
-    if (this->glCaps().srgbDecodeDisableSupport() && GrPixelConfigIsSRGB(texture->config())) {
+    if (GrPixelConfigIsSRGB(texture->config())) {
         GL_CALL(TexParameteri(target, GR_GL_TEXTURE_SRGB_DECODE_EXT,
                               allowSRGBInputs ? GR_GL_DECODE_EXT : GR_GL_SKIP_DECODE_EXT));
     }
diff --git a/tests/SRGBMipMapTest.cpp b/tests/SRGBMipMapTest.cpp
index 88f08d4..a875c5c 100644
--- a/tests/SRGBMipMapTest.cpp
+++ b/tests/SRGBMipMapTest.cpp
@@ -10,7 +10,6 @@
 #include "GrCaps.h"
 #include "GrContext.h"
 #include "GrRenderTargetContext.h"
-#include "gl/GrGLGpu.h"
 #include "SkCanvas.h"
 #include "SkSurface.h"
 
@@ -143,18 +142,8 @@
     // 2) Draw texture to L32 surface (should generate/use linear mips)
     paint.setGammaCorrect(false);
     l32RenderTargetContext->drawRect(noClip, paint, SkMatrix::I(), rect);
-
-    // Right now, this test only runs on GL (because Vulkan doesn't support legacy mip-mapping
-    // skbug.com/5048). On GL, we may not have sRGB decode support. In that case, rendering sRGB
-    // textures to a legacy surface produces nonsense, so this part of the test is meaningless.
-    //
-    // TODO: Once Vulkan supports legacy mip-mapping, we can promote this to GrCaps. Right now,
-    // Vulkan has most of the functionality, but not the mip-mapping part that's being tested here.
-    GrGLGpu* glGpu = static_cast<GrGLGpu*>(context->getGpu());
-    if (glGpu->glCaps().srgbDecodeDisableSupport()) {
-        read_and_check_pixels(reporter, l32RenderTargetContext->asTexture().get(), expectedLinear,
-                              error, "re-render as linear");
-    }
+    read_and_check_pixels(reporter, l32RenderTargetContext->asTexture().get(), expectedLinear,
+                          error, "re-render as linear");
 
     // 3) Go back to sRGB
     paint.setGammaCorrect(true);
diff --git a/tools/flags/SkCommonFlagsConfig.cpp b/tools/flags/SkCommonFlagsConfig.cpp
index 5763e4a..abd8bdb 100644
--- a/tools/flags/SkCommonFlagsConfig.cpp
+++ b/tools/flags/SkCommonFlagsConfig.cpp
@@ -185,19 +185,8 @@
     if (useInstanced) {
         fContextOptions |= ContextOptions::kUseInstanced;
     }
-    // Subtle logic: If the config has a color space attached, we're going to be rendering to sRGB,
-    // so we need that capability. In addition, to get the widest test coverage, we DO NOT require
-    // that we can disable sRGB decode. (That's for rendering sRGB sources to legacy surfaces).
-    //
-    // If the config doesn't have a color space attached, we're going to be rendering in legacy
-    // mode. In that case, we can't allow a context to be created that has sRGB support without
-    // the ability to disable sRGB decode. Otherwise, all of our sRGB source resources will be
-    // treated as sRGB textures, but we will be unable to prevent the decode, causing them to be
-    // too dark.
     if (fColorSpace) {
         fContextOptions |= ContextOptions::kRequireSRGBSupport;
-    } else {
-        fContextOptions |= ContextOptions::kRequireSRGBDecodeDisableSupport;
     }
 }
 static bool parse_option_int(const SkString& value, int* outInt) {
diff --git a/tools/gpu/GrContextFactory.cpp b/tools/gpu/GrContextFactory.cpp
index 8d9bd41..ffc5295 100644
--- a/tools/gpu/GrContextFactory.cpp
+++ b/tools/gpu/GrContextFactory.cpp
@@ -198,8 +198,6 @@
     if (ContextOptions::kUseInstanced & options) {
         grOptions.fEnableInstancedRendering = true;
     }
-    grOptions.fRequireDecodeDisableForSRGB =
-        SkToBool(ContextOptions::kRequireSRGBDecodeDisableSupport & options);
     grCtx.reset(GrContext::Create(backend, backendContext, grOptions));
     if (!grCtx.get()) {
         return ContextInfo();
diff --git a/tools/gpu/GrContextFactory.h b/tools/gpu/GrContextFactory.h
index a232112..54b6611 100644
--- a/tools/gpu/GrContextFactory.h
+++ b/tools/gpu/GrContextFactory.h
@@ -93,11 +93,10 @@
      * to not using GL_NV_path_rendering extension  even when the driver supports it.
      */
     enum class ContextOptions {
-        kNone                            = 0x0,
-        kEnableNVPR                      = 0x1,
-        kUseInstanced                    = 0x2,
-        kRequireSRGBSupport              = 0x4,
-        kRequireSRGBDecodeDisableSupport = 0x8,
+        kNone                = 0x0,
+        kEnableNVPR          = 0x1,
+        kUseInstanced        = 0x2,
+        kRequireSRGBSupport  = 0x4,
     };
 
     static ContextType NativeContextTypeForBackend(GrBackend backend) {