Add a function to compute texture sizes
This lets memory tracing consumers agree on how large textures are.
Bug: chromium:944846
Change-Id: I5ea8cb5e1331b4ad8d6f59f656c6e61d44290489
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/203727
Reviewed-by: Brian Salomon <bsalomon@google.com>
Commit-Queue: Brian Salomon <bsalomon@google.com>
Auto-Submit: Adrienne Walker <enne@chromium.org>
diff --git a/src/gpu/GrContext.cpp b/src/gpu/GrContext.cpp
index 9713beb..6e19511 100644
--- a/src/gpu/GrContext.cpp
+++ b/src/gpu/GrContext.cpp
@@ -214,6 +214,13 @@
return fResourceCache->getPurgeableBytes();
}
+size_t GrContext::ComputeTextureSize(SkColorType type, int width, int height, GrMipMapped mipMapped,
+ bool useNextPow2) {
+ int colorSamplesPerPixel = 1;
+ return GrSurface::ComputeSize(SkColorType2GrPixelConfig(type), width, height,
+ colorSamplesPerPixel, mipMapped, useNextPow2);
+}
+
////////////////////////////////////////////////////////////////////////////////
int GrContext::maxTextureSize() const { return this->caps()->maxTextureSize(); }