/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include <new>

#include "include/core/SkPoint.h"
#include "include/core/SkPoint3.h"
#include "include/gpu/GrRecordingContext.h"
#include "include/private/SkFloatingPoint.h"
#include "include/private/SkTo.h"
#include "src/core/SkMathPriv.h"
#include "src/core/SkMatrixPriv.h"
#include "src/core/SkRectPriv.h"
#include "src/gpu/GrAppliedClip.h"
#include "src/gpu/GrCaps.h"
#include "src/gpu/GrDrawOpTest.h"
#include "src/gpu/GrGeometryProcessor.h"
#include "src/gpu/GrGpu.h"
#include "src/gpu/GrMemoryPool.h"
#include "src/gpu/GrOpFlushState.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrResourceProvider.h"
#include "src/gpu/GrResourceProviderPriv.h"
#include "src/gpu/GrShaderCaps.h"
#include "src/gpu/GrTexture.h"
#include "src/gpu/GrTextureProxy.h"
#include "src/gpu/SkGr.h"
#include "src/gpu/effects/GrBlendFragmentProcessor.h"
#include "src/gpu/effects/generated/GrClampFragmentProcessor.h"
#include "src/gpu/geometry/GrQuad.h"
#include "src/gpu/geometry/GrQuadBuffer.h"
#include "src/gpu/geometry/GrQuadUtils.h"
#include "src/gpu/glsl/GrGLSLVarying.h"
#include "src/gpu/ops/GrFillRectOp.h"
#include "src/gpu/ops/GrMeshDrawOp.h"
#include "src/gpu/ops/GrQuadPerEdgeAA.h"
#include "src/gpu/ops/GrSimpleMeshDrawOpHelper.h"
#include "src/gpu/ops/GrTextureOp.h"

namespace {

using Subset = GrQuadPerEdgeAA::Subset;
using VertexSpec = GrQuadPerEdgeAA::VertexSpec;
using ColorType = GrQuadPerEdgeAA::ColorType;

// Extracts lengths of vertical and horizontal edges of axis-aligned quad. "width" is the edge
// between v0 and v2 (or v1 and v3), "height" is the edge between v0 and v1 (or v2 and v3).
static SkSize axis_aligned_quad_size(const GrQuad& quad) {
    SkASSERT(quad.quadType() == GrQuad::Type::kAxisAligned);
    // Simplification of regular edge length equation, since it's axis aligned and can avoid sqrt
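    // (for an axis-aligned quad one of the two terms in each sum is zero, so |dx| + |dy|
    // collapses to the exact edge length)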
    float dw = sk_float_abs(quad.x(2) - quad.x(0)) + sk_float_abs(quad.y(2) - quad.y(0));
    float dh = sk_float_abs(quad.x(1) - quad.x(0)) + sk_float_abs(quad.y(1) - quad.y(0));
    return {dw, dh};
}

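// Returns whether bilinear filtering (first element) and mipmapping (second element) would
// actually change the sampled result for the given src->dst quad mapping. A pixel-aligned,
// unscaled mapping needs neither, which lets the caller keep the net sampler state at
// kNearest / MipmapMode::kNone.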
static std::tuple<bool /* filter */,
                  bool /* mipmap */>
filter_and_mm_have_effect(const GrQuad& srcQuad, const GrQuad& dstQuad) {
    // If not axis-aligned in src or dst, then always say it has an effect
    if (srcQuad.quadType() != GrQuad::Type::kAxisAligned ||
        dstQuad.quadType() != GrQuad::Type::kAxisAligned) {
        return {true, true};
    }

    SkRect srcRect;
    SkRect dstRect;
    if (srcQuad.asRect(&srcRect) && dstQuad.asRect(&dstRect)) {
        // Disable filtering when there is no scaling (width and height are the same), and the
        // top-left corners have the same fraction (so src and dst snap to the pixel grid
        // identically).
        SkASSERT(srcRect.isSorted());
        bool filter = srcRect.width() != dstRect.width() || srcRect.height() != dstRect.height() ||
                      SkScalarFraction(srcRect.fLeft) != SkScalarFraction(dstRect.fLeft) ||
                      SkScalarFraction(srcRect.fTop) != SkScalarFraction(dstRect.fTop);
        bool mm = srcRect.width() > dstRect.width() || srcRect.height() > dstRect.height();
        return {filter, mm};
    }
    // Extract edge lengths
    SkSize srcSize = axis_aligned_quad_size(srcQuad);
    SkSize dstSize = axis_aligned_quad_size(dstQuad);
    // Although the quads are axis-aligned, the local coordinate system is transformed such
    // that fractionally-aligned sample centers will not align with the device coordinate system.
    // So disable filtering only when the edges are the same length and both srcQuad's and
    // dstQuad's 0th vertices are integer aligned.
    bool filter = srcSize != dstSize ||
                  !SkScalarIsInt(srcQuad.x(0)) ||
                  !SkScalarIsInt(srcQuad.y(0)) ||
                  !SkScalarIsInt(dstQuad.x(0)) ||
                  !SkScalarIsInt(dstQuad.y(0));
    bool mm = srcSize.fWidth > dstSize.fWidth || srcSize.fHeight > dstSize.fHeight;
    return {filter, mm};
}

// Describes function for normalizing src coords: [x * iw, y * ih + yOffset] can represent
// regular and rectangular textures, w/ or w/o origin correction.
struct NormalizationParams {
    float fIW;      // 1 / width of texture, or 1.0 for texture rectangles
    float fInvH;    // 1 / height of texture, or 1.0 for tex rects, X -1 if bottom-left origin
    float fYOffset; // 0 for top-left origin, height of [normalized] tex if bottom-left
};
static NormalizationParams proxy_normalization_params(const GrSurfaceProxy* proxy,
                                                      GrSurfaceOrigin origin) {
    // Whether or not the proxy is instantiated, this is the size its texture will be, so we can
    // normalize the src coordinates up front.
    SkISize dimensions = proxy->backingStoreDimensions();
    float iw, ih, h;
    if (proxy->backendFormat().textureType() == GrTextureType::kRectangle) {
        iw = ih = 1.f;
        h = dimensions.height();
    } else {
        iw = 1.f / dimensions.width();
        ih = 1.f / dimensions.height();
        h = 1.f;
    }

    if (origin == kBottomLeft_GrSurfaceOrigin) {
        return {iw, -ih, h};
    } else {
        return {iw, ih, 0.0f};
    }
}

// Normalize the subset. If 'subsetRect' is null, it is assumed no subset constraint is desired,
// so a sufficiently large rect is returned even if the quad ends up batched with an op that uses
// subsets overall. When there is a subset it will be inset based on the filter mode. Normalization
// and y-flipping are applied as indicated by NormalizationParams.
static SkRect normalize_and_inset_subset(GrSamplerState::Filter filter,
                                         const NormalizationParams& params,
                                         const SkRect* subsetRect) {
    static constexpr SkRect kLargeRect = {-100000, -100000, 1000000, 1000000};
    if (!subsetRect) {
        // Either the quad has no subset constraint and is batched with a subset constrained op
        // (in which case we want a subset that doesn't restrict normalized tex coords), or the
        // entire op doesn't use the subset, in which case the returned value is ignored.
        return kLargeRect;
    }

    auto ltrb = skvx::Vec<4, float>::Load(subsetRect);
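    // Multiplying by {1, 1, -1, -1} negates the right/bottom lanes, so the vector floor() and
    // min() below behave like ceil() and max() on those edges; one set of lane-wise ops then
    // snaps and pins all four edges symmetrically.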
    auto flipHi = skvx::Vec<4, float>({1.f, 1.f, -1.f, -1.f});
    if (filter == GrSamplerState::Filter::kNearest) {
        // Make sure our insetting puts us at pixel centers.
        ltrb = skvx::floor(ltrb*flipHi)*flipHi;
    }
    // Inset with pin to the rect center.
    ltrb += skvx::Vec<4, float>({.5f, .5f, -.5f, -.5f});
    auto mid = (skvx::shuffle<2, 3, 0, 1>(ltrb) + ltrb)*0.5f;
    ltrb = skvx::min(ltrb*flipHi, mid*flipHi)*flipHi;

    // Normalize and offset
    ltrb = ltrb * skvx::Vec<4, float>{params.fIW, params.fInvH, params.fIW, params.fInvH} +
           skvx::Vec<4, float>{0.f, params.fYOffset, 0.f, params.fYOffset};
    if (params.fInvH < 0.f) {
        // Flip top and bottom to keep the rect sorted when loaded back to SkRect.
        ltrb = skvx::shuffle<0, 3, 2, 1>(ltrb);
    }

    SkRect out;
    ltrb.store(&out);
    return out;
}

// Normalizes logical src coords and corrects for origin
static void normalize_src_quad(const NormalizationParams& params,
                               GrQuad* srcQuad) {
    // The src quad should not have any perspective
    SkASSERT(!srcQuad->hasPerspective());
    skvx::Vec<4, float> xs = srcQuad->x4f() * params.fIW;
    skvx::Vec<4, float> ys = srcQuad->y4f() * params.fInvH + params.fYOffset;
    xs.store(srcQuad->xs());
    ys.store(srcQuad->ys());
}

// Count the number of proxy runs in the entry set. This usually is already computed by
// SkGpuDevice, but when the BatchLengthLimiter chops the set up it must determine a new proxy count
// for each split.
static int proxy_run_count(const GrRenderTargetContext::TextureSetEntry set[], int count) {
    int actualProxyRunCount = 0;
    const GrSurfaceProxy* lastProxy = nullptr;
    for (int i = 0; i < count; ++i) {
        if (set[i].fProxyView.proxy() != lastProxy) {
            actualProxyRunCount++;
            lastProxy = set[i].fProxyView.proxy();
        }
    }
    return actualProxyRunCount;
}

static bool safe_to_ignore_subset_rect(GrAAType aaType, GrSamplerState::Filter filter,
                                       const DrawQuad& quad, const SkRect& subsetRect) {
    // If the device and local quads are both axis-aligned, and filtering is off, the local quad
    // can push all the way up to the edges of the subset rect and the sampler shouldn't
    // overshoot. Unfortunately, antialiasing adds enough jitter that we can only rely on this in
    // the non-antialiased case.
    SkRect localBounds = quad.fLocal.bounds();
    if (aaType == GrAAType::kNone &&
        filter == GrSamplerState::Filter::kNearest &&
        quad.fDevice.quadType() == GrQuad::Type::kAxisAligned &&
        quad.fLocal.quadType() == GrQuad::Type::kAxisAligned &&
        subsetRect.contains(localBounds)) {

        return true;
    }

    // If the subset rect is inset by at least 0.5 pixels into the local quad's bounds, the
    // sampler shouldn't overshoot, even when antialiasing and filtering are taken into account.
    if (subsetRect.makeInset(0.5f, 0.5f).contains(localBounds)) {
        return true;
    }

    // The subset rect cannot be ignored safely.
    return false;
}

/**
 * Op that implements GrTextureOp::Make. It draws textured quads. Each quad can modulate the
 * texture by a color. The blend with the destination is always src-over. The edges are non-AA.
 */
class TextureOp final : public GrMeshDrawOp {
public:
    static std::unique_ptr<GrDrawOp> Make(GrRecordingContext* context,
                                          GrSurfaceProxyView proxyView,
                                          sk_sp<GrColorSpaceXform> textureXform,
                                          GrSamplerState::Filter filter,
                                          GrSamplerState::MipmapMode mm,
                                          const SkPMColor4f& color,
                                          GrTextureOp::Saturate saturate,
                                          GrAAType aaType,
                                          DrawQuad* quad,
                                          const SkRect* subset) {
        GrOpMemoryPool* pool = context->priv().opMemoryPool();
        return pool->allocate<TextureOp>(std::move(proxyView), std::move(textureXform), filter, mm,
                                         color, saturate, aaType, quad, subset);
    }

    static std::unique_ptr<GrDrawOp> Make(GrRecordingContext* context,
                                          GrRenderTargetContext::TextureSetEntry set[],
                                          int cnt,
                                          int proxyRunCnt,
                                          GrSamplerState::Filter filter,
                                          GrSamplerState::MipmapMode mm,
                                          GrTextureOp::Saturate saturate,
                                          GrAAType aaType,
                                          SkCanvas::SrcRectConstraint constraint,
                                          const SkMatrix& viewMatrix,
                                          sk_sp<GrColorSpaceXform> textureColorSpaceXform) {
        // Allocate size based on proxyRunCnt, since that determines number of ViewCountPairs.
        SkASSERT(proxyRunCnt <= cnt);

        size_t size = sizeof(TextureOp) + sizeof(ViewCountPair) * (proxyRunCnt - 1);
        GrOpMemoryPool* pool = context->priv().opMemoryPool();
        void* mem = pool->allocate(size);
        return std::unique_ptr<GrDrawOp>(
                new (mem) TextureOp(set, cnt, proxyRunCnt, filter, mm, saturate, aaType, constraint,
                                    viewMatrix, std::move(textureColorSpaceXform)));
    }

    ~TextureOp() override {
        for (unsigned p = 1; p < fMetadata.fProxyCount; ++p) {
            fViewCountPairs[p].~ViewCountPair();
        }
    }

    const char* name() const override { return "TextureOp"; }

    void visitProxies(const VisitProxyFunc& func) const override {
        bool mipped = (fMetadata.mipmapMode() != GrSamplerState::MipmapMode::kNone);
        for (unsigned p = 0; p < fMetadata.fProxyCount; ++p) {
            func(fViewCountPairs[p].fProxy.get(), GrMipmapped(mipped));
        }
        if (fDesc && fDesc->fProgramInfo) {
            fDesc->fProgramInfo->visitFPProxies(func);
        }
    }

#ifdef SK_DEBUG
    SkString dumpInfo() const override {
        SkString str;
        str.appendf("# draws: %d\n", fQuads.count());
        auto iter = fQuads.iterator();
        for (unsigned p = 0; p < fMetadata.fProxyCount; ++p) {
            str.appendf("Proxy ID: %d, Filter: %d, MM: %d\n",
                        fViewCountPairs[p].fProxy->uniqueID().asUInt(),
                        static_cast<int>(fMetadata.fFilter),
                        static_cast<int>(fMetadata.fMipmapMode));
            int i = 0;
            while(i < fViewCountPairs[p].fQuadCnt && iter.next()) {
                const GrQuad* quad = iter.deviceQuad();
                GrQuad uv = iter.isLocalValid() ? *(iter.localQuad()) : GrQuad();
                const ColorSubsetAndAA& info = iter.metadata();
                str.appendf(
                        "%d: Color: 0x%08x, Subset(%d): [L: %.2f, T: %.2f, R: %.2f, B: %.2f]\n"
                        "  UVs  [(%.2f, %.2f), (%.2f, %.2f), (%.2f, %.2f), (%.2f, %.2f)]\n"
                        "  Quad [(%.2f, %.2f), (%.2f, %.2f), (%.2f, %.2f), (%.2f, %.2f)]\n",
                        i, info.fColor.toBytes_RGBA(), fMetadata.fSubset, info.fSubsetRect.fLeft,
                        info.fSubsetRect.fTop, info.fSubsetRect.fRight, info.fSubsetRect.fBottom,
                        quad->point(0).fX, quad->point(0).fY, quad->point(1).fX, quad->point(1).fY,
                        quad->point(2).fX, quad->point(2).fY, quad->point(3).fX, quad->point(3).fY,
                        uv.point(0).fX, uv.point(0).fY, uv.point(1).fX, uv.point(1).fY,
                        uv.point(2).fX, uv.point(2).fY, uv.point(3).fX, uv.point(3).fY);

                i++;
            }
        }
        str += INHERITED::dumpInfo();
        return str;
    }

    static void ValidateResourceLimits() {
        // The op implementation has an upper bound on the number of quads that it can represent.
        // However, the resource manager imposes its own limit on the number of quads, which should
        // always be lower than the numerical limit this op can hold.
        using CountStorage = decltype(Metadata::fTotalQuadCount);
        CountStorage maxQuadCount = std::numeric_limits<CountStorage>::max();
        // GrResourceProvider::Max...() is typed as int, so don't compare across signed/unsigned.
        int resourceLimit = SkTo<int>(maxQuadCount);
        SkASSERT(GrResourceProvider::MaxNumAAQuads() <= resourceLimit &&
                 GrResourceProvider::MaxNumNonAAQuads() <= resourceLimit);
    }
#endif

    GrProcessorSet::Analysis finalize(
            const GrCaps& caps, const GrAppliedClip*, bool hasMixedSampledCoverage,
            GrClampType clampType) override {
        SkASSERT(fMetadata.colorType() == ColorType::kNone);
        auto iter = fQuads.metadata();
        while(iter.next()) {
            auto colorType = GrQuadPerEdgeAA::MinColorType(iter->fColor);
            fMetadata.fColorType = std::max(fMetadata.fColorType, static_cast<uint16_t>(colorType));
        }
        return GrProcessorSet::EmptySetAnalysis();
    }

    FixedFunctionFlags fixedFunctionFlags() const override {
        return fMetadata.aaType() == GrAAType::kMSAA ? FixedFunctionFlags::kUsesHWAA
                                                     : FixedFunctionFlags::kNone;
    }

    DEFINE_OP_CLASS_ID

private:
    friend class ::GrOpMemoryPool;

    struct ColorSubsetAndAA {
        ColorSubsetAndAA(const SkPMColor4f& color, const SkRect& subsetRect, GrQuadAAFlags aaFlags)
                : fColor(color)
                , fSubsetRect(subsetRect)
                , fAAFlags(static_cast<uint16_t>(aaFlags)) {
            SkASSERT(fAAFlags == static_cast<uint16_t>(aaFlags));
        }

        SkPMColor4f fColor;
        // If the op doesn't use subsets, this is ignored. If the op uses subsets and the specific
        // entry does not, this rect will equal kLargeRect, so it automatically has no effect.
        SkRect fSubsetRect;
        unsigned fAAFlags : 4;

        GrQuadAAFlags aaFlags() const { return static_cast<GrQuadAAFlags>(fAAFlags); }
    };

    struct ViewCountPair {
        // Normally this would be a GrSurfaceProxyView, but GrTextureOp applies the GrOrigin right
        // away so it doesn't need to be stored, and all ViewCountPairs in an op have the same
        // swizzle so that is stored in the op metadata.
        sk_sp<GrSurfaceProxy> fProxy;
        int fQuadCnt;
    };

    // TextureOp and ViewCountPair are 8 byte aligned. This is packed into 8 bytes to minimally
    // increase the size of the op; increasing the op size can have a surprising impact on
    // performance (since texture ops are among the most commonly used ops in an app).
    struct Metadata {
        // AAType must be filled after initialization; ColorType is determined in finalize()
        Metadata(const GrSwizzle& swizzle,
                 GrSamplerState::Filter filter,
                 GrSamplerState::MipmapMode mm,
                 GrQuadPerEdgeAA::Subset subset,
                 GrTextureOp::Saturate saturate)
                : fSwizzle(swizzle)
                , fProxyCount(1)
                , fTotalQuadCount(1)
                , fFilter(static_cast<uint16_t>(filter))
                , fMipmapMode(static_cast<uint16_t>(mm))
                , fAAType(static_cast<uint16_t>(GrAAType::kNone))
                , fColorType(static_cast<uint16_t>(ColorType::kNone))
                , fSubset(static_cast<uint16_t>(subset))
                , fSaturate(static_cast<uint16_t>(saturate)) {}

        GrSwizzle fSwizzle; // sizeof(GrSwizzle) == uint16_t
        uint16_t fProxyCount;
        // This will be >= fProxyCount, since a proxy may be drawn multiple times
        uint16_t fTotalQuadCount;

        // These must be based on uint16_t to help MSVC pack the bitfields optimally
        uint16_t fFilter     : 2; // GrSamplerState::Filter
        uint16_t fMipmapMode : 2; // GrSamplerState::MipmapMode
        uint16_t fAAType     : 2; // GrAAType
        uint16_t fColorType  : 2; // GrQuadPerEdgeAA::ColorType
        uint16_t fSubset     : 1; // bool
        uint16_t fSaturate   : 1; // bool
        uint16_t fUnused     : 6; // # of bits left before Metadata exceeds 8 bytes

        GrSamplerState::Filter filter() const {
            return static_cast<GrSamplerState::Filter>(fFilter);
        }
        GrSamplerState::MipmapMode mipmapMode() const {
            return static_cast<GrSamplerState::MipmapMode>(fMipmapMode);
        }
        GrAAType aaType() const { return static_cast<GrAAType>(fAAType); }
        ColorType colorType() const { return static_cast<ColorType>(fColorType); }
        Subset subset() const { return static_cast<Subset>(fSubset); }
        GrTextureOp::Saturate saturate() const {
            return static_cast<GrTextureOp::Saturate>(fSaturate);
        }

        static_assert(GrSamplerState::kFilterCount <= 4);
        static_assert(kGrAATypeCount <= 4);
        static_assert(GrQuadPerEdgeAA::kColorTypeCount <= 4);
    };
    static_assert(sizeof(Metadata) == 8);

    // This descriptor is used to store the draw info we decide on during on(Pre)PrepareDraws. We
    // store the data in a separate struct in order to minimize the size of the TextureOp.
    // Historically, increasing the TextureOp's size has caused surprising perf regressions, but we
    // may want to re-evaluate whether this is still necessary.
    //
    // In the onPrePrepareDraws case it is allocated in the creation-time opData arena, and
    // allocatePrePreparedVertices is also called.
    //
    // In the onPrepareDraws case this descriptor is allocated in the flush-time arena (i.e., as
    // part of the flushState).
    struct Desc {
        VertexSpec fVertexSpec;
        int fNumProxies = 0;
        int fNumTotalQuads = 0;

        // This member variable is only used by 'onPrePrepareDraws'.
        char* fPrePreparedVertices = nullptr;

        GrProgramInfo* fProgramInfo = nullptr;

        sk_sp<const GrBuffer> fIndexBuffer;
        sk_sp<const GrBuffer> fVertexBuffer;
        int fBaseVertex;

        // How big should 'fVertices' be to hold all the vertex data?
        size_t totalSizeInBytes() const {
            return this->totalNumVertices() * fVertexSpec.vertexSize();
        }

        int totalNumVertices() const {
            return fNumTotalQuads * fVertexSpec.verticesPerQuad();
        }

        void allocatePrePreparedVertices(SkArenaAlloc* arena) {
            fPrePreparedVertices = arena->makeArrayDefault<char>(this->totalSizeInBytes());
        }
    };
    // If subsetRect is not null it will be used to apply a strict src rect-style constraint.
    TextureOp(GrSurfaceProxyView proxyView,
              sk_sp<GrColorSpaceXform> textureColorSpaceXform,
              GrSamplerState::Filter filter,
              GrSamplerState::MipmapMode mm,
              const SkPMColor4f& color,
              GrTextureOp::Saturate saturate,
              GrAAType aaType,
              DrawQuad* quad,
              const SkRect* subsetRect)
            : INHERITED(ClassID())
            , fQuads(1, true /* includes locals */)
            , fTextureColorSpaceXform(std::move(textureColorSpaceXform))
            , fDesc(nullptr)
            , fMetadata(proxyView.swizzle(), filter, mm, Subset(!!subsetRect), saturate) {
        // Clean up disparities between the overall aa type and edge configuration and apply
        // optimizations based on the rect and matrix when appropriate
        GrQuadUtils::ResolveAAType(aaType, quad->fEdgeFlags, quad->fDevice,
                                   &aaType, &quad->fEdgeFlags);
        fMetadata.fAAType = static_cast<uint16_t>(aaType);

        // We expect our caller to have already caught this optimization.
        SkASSERT(!subsetRect ||
                 !subsetRect->contains(proxyView.proxy()->backingStoreBoundsRect()));

        // We may have had a strict constraint with nearest filter solely due to possible AA bloat.
        // Try to identify cases where the subsetting isn't actually necessary, and skip it.
        if (subsetRect) {
            if (safe_to_ignore_subset_rect(aaType, filter, *quad, *subsetRect)) {
                subsetRect = nullptr;
                fMetadata.fSubset = static_cast<uint16_t>(Subset::kNo);
            }
        }

        // Normalize src coordinates and the subset (if set)
        NormalizationParams params = proxy_normalization_params(proxyView.proxy(),
                                                                proxyView.origin());
        normalize_src_quad(params, &quad->fLocal);
        SkRect subset = normalize_and_inset_subset(filter, params, subsetRect);

        // Set bounds before clipping so we don't have to worry about unioning the bounds of
        // the two potential quads (GrQuad::bounds() is perspective-safe).
        this->setBounds(quad->fDevice.bounds(), HasAABloat(aaType == GrAAType::kCoverage),
                        IsHairline::kNo);

        int quadCount = this->appendQuad(quad, color, subset);
        fViewCountPairs[0] = {proxyView.detachProxy(), quadCount};
    }

    TextureOp(GrRenderTargetContext::TextureSetEntry set[],
              int cnt,
              int proxyRunCnt,
              GrSamplerState::Filter filter,
              GrSamplerState::MipmapMode mm,
              GrTextureOp::Saturate saturate,
              GrAAType aaType,
              SkCanvas::SrcRectConstraint constraint,
              const SkMatrix& viewMatrix,
              sk_sp<GrColorSpaceXform> textureColorSpaceXform)
            : INHERITED(ClassID())
            , fQuads(cnt, true /* includes locals */)
            , fTextureColorSpaceXform(std::move(textureColorSpaceXform))
            , fDesc(nullptr)
            , fMetadata(set[0].fProxyView.swizzle(),
                        GrSamplerState::Filter::kNearest,
                        GrSamplerState::MipmapMode::kNone,
                        Subset::kNo,
                        saturate) {
        // Update counts to reflect the batch op
        fMetadata.fProxyCount = SkToUInt(proxyRunCnt);
        fMetadata.fTotalQuadCount = SkToUInt(cnt);

        SkRect bounds = SkRectPriv::MakeLargestInverted();

        GrAAType netAAType = GrAAType::kNone; // aa type maximally compatible with all dst rects
        Subset netSubset = Subset::kNo;
        GrSamplerState::Filter netFilter = GrSamplerState::Filter::kNearest;
        GrSamplerState::MipmapMode netMM = GrSamplerState::MipmapMode::kNone;

        const GrSurfaceProxy* curProxy = nullptr;

        // 'q' is the index in 'set' and fQuadBuffer; 'p' is the index in fViewCountPairs and only
        // increases when set[q]'s proxy changes.
        int p = 0;
        for (int q = 0; q < cnt; ++q) {
            SkASSERT(mm == GrSamplerState::MipmapMode::kNone ||
                     (set[0].fProxyView.proxy()->asTextureProxy()->mipmapped() ==
                      GrMipmapped::kYes));
            if (q == 0) {
                // We do not placement new the first ViewCountPair since that one is allocated and
                // initialized as part of the GrTextureOp creation.
                fViewCountPairs[0].fProxy = set[0].fProxyView.detachProxy();
                fViewCountPairs[0].fQuadCnt = 0;
                curProxy = fViewCountPairs[0].fProxy.get();
            } else if (set[q].fProxyView.proxy() != curProxy) {
                // We must placement new the ViewCountPairs here so that the sk_sps in the
                // GrSurfaceProxyView get initialized properly.
                new(&fViewCountPairs[++p])ViewCountPair({set[q].fProxyView.detachProxy(), 0});

                curProxy = fViewCountPairs[p].fProxy.get();
                SkASSERT(GrTextureProxy::ProxiesAreCompatibleAsDynamicState(
                        curProxy, fViewCountPairs[0].fProxy.get()));
                SkASSERT(fMetadata.fSwizzle == set[q].fProxyView.swizzle());
            } // else another quad referencing the same proxy

            SkMatrix ctm = viewMatrix;
            if (set[q].fPreViewMatrix) {
                ctm.preConcat(*set[q].fPreViewMatrix);
            }

            // Use dstRect/srcRect unless dstClip is provided, in which case derive new source
            // coordinates by mapping dstClipQuad by the dstRect to srcRect transform.
            DrawQuad quad;
            if (set[q].fDstClipQuad) {
                quad.fDevice = GrQuad::MakeFromSkQuad(set[q].fDstClipQuad, ctm);

                SkPoint srcPts[4];
                GrMapRectPoints(set[q].fDstRect, set[q].fSrcRect, set[q].fDstClipQuad, srcPts, 4);
                quad.fLocal = GrQuad::MakeFromSkQuad(srcPts, SkMatrix::I());
            } else {
                quad.fDevice = GrQuad::MakeFromRect(set[q].fDstRect, ctm);
                quad.fLocal = GrQuad(set[q].fSrcRect);
            }

            if (netFilter != filter || netMM != mm) {
                // The only way netFilter != filter is if linear is requested and we haven't yet
                // found a quad that requires linear (so net is still nearest). Similar for mip
                // mapping.
                SkASSERT(filter == netFilter ||
                         (netFilter == GrSamplerState::Filter::kNearest && filter > netFilter));
                SkASSERT(mm == netMM ||
                         (netMM == GrSamplerState::MipmapMode::kNone && mm > netMM));
                auto [mustFilter, mustMM] = filter_and_mm_have_effect(quad.fLocal, quad.fDevice);
                if (mustFilter && filter != GrSamplerState::Filter::kNearest) {
                    netFilter = filter;
                }
                if (mustMM && mm != GrSamplerState::MipmapMode::kNone) {
                    netMM = mm;
                }
            }

            // Update overall bounds of the op as the union of all quads
            bounds.joinPossiblyEmptyRect(quad.fDevice.bounds());

            // Determine the AA type for the quad, then merge with net AA type
            GrAAType aaForQuad;
            GrQuadUtils::ResolveAAType(aaType, set[q].fAAFlags, quad.fDevice,
                                       &aaForQuad, &quad.fEdgeFlags);

            // Resolve sets aaForQuad to aaType or None; there is never a change between aa methods
            SkASSERT(aaForQuad == GrAAType::kNone || aaForQuad == aaType);
            if (netAAType == GrAAType::kNone && aaForQuad != GrAAType::kNone) {
                netAAType = aaType;
            }

            // Calculate metadata for the entry
            const SkRect* subsetForQuad = nullptr;
            if (constraint == SkCanvas::kStrict_SrcRectConstraint) {
                // Check (briefly) if the subset rect is actually needed for this set entry.
                SkRect* subsetRect = &set[q].fSrcRect;
                if (!subsetRect->contains(curProxy->backingStoreBoundsRect())) {
                    if (!safe_to_ignore_subset_rect(aaForQuad, filter, quad, *subsetRect)) {
                        netSubset = Subset::kYes;
                        subsetForQuad = subsetRect;
                    }
                }
            }

            // Normalize the src quads and apply origin
            NormalizationParams proxyParams = proxy_normalization_params(
                    curProxy, set[q].fProxyView.origin());
            normalize_src_quad(proxyParams, &quad.fLocal);

            // This subset may represent a no-op, otherwise it will have the origin and dimensions
            // of the texture applied to it. Insetting for bilinear filtering is deferred until
            // on[Pre]Prepare so that the overall filter can be lazily determined.
            SkRect subset = normalize_and_inset_subset(filter, proxyParams, subsetForQuad);

            // Always append a quad (or 2 if perspective clipped); it just may refer back to a
            // prior ViewCountPair (this frequently happens when Chrome draws 9-patches).
            fViewCountPairs[p].fQuadCnt += this->appendQuad(&quad, set[q].fColor, subset);
        }
        // The # of proxy switches should match what was provided (+1 because we incremented p
        // when a new proxy was encountered).
        SkASSERT((p + 1) == fMetadata.fProxyCount);
        SkASSERT(fQuads.count() == fMetadata.fTotalQuadCount);

        fMetadata.fAAType = static_cast<uint16_t>(netAAType);
        fMetadata.fFilter = static_cast<uint16_t>(netFilter);
        fMetadata.fSubset = static_cast<uint16_t>(netSubset);

        this->setBounds(bounds, HasAABloat(netAAType == GrAAType::kCoverage), IsHairline::kNo);
    }

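    // Appends 'quad' to fQuads (as two entries when clipping against w=0 splits it) and returns
    // the number of quads actually added.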
    int appendQuad(DrawQuad* quad, const SkPMColor4f& color, const SkRect& subset) {
        DrawQuad extra;
        // Only clip when there's anti-aliasing. When non-aa, the GPU clips just fine and there's
        // no inset/outset math that requires w > 0.
        int quadCount = quad->fEdgeFlags != GrQuadAAFlags::kNone ?
                GrQuadUtils::ClipToW0(quad, &extra) : 1;
        if (quadCount == 0) {
            // We can't discard the op at this point, but disable AA flags so it won't go through
            // inset/outset processing
            quad->fEdgeFlags = GrQuadAAFlags::kNone;
            quadCount = 1;
        }
        fQuads.append(quad->fDevice, {color, subset, quad->fEdgeFlags}, &quad->fLocal);
        if (quadCount > 1) {
            fQuads.append(extra.fDevice, {color, subset, extra.fEdgeFlags}, &extra.fLocal);
            fMetadata.fTotalQuadCount++;
        }
        return quadCount;
    }

    GrProgramInfo* programInfo() override {
        // Although this Op implements its own onPrePrepareDraws, it calls GrMeshDrawOp's version,
        // so this entry point will be called.
        return (fDesc) ? fDesc->fProgramInfo : nullptr;
    }

    void onCreateProgramInfo(const GrCaps* caps,
                             SkArenaAlloc* arena,
                             const GrSurfaceProxyView* writeView,
                             GrAppliedClip&& appliedClip,
                             const GrXferProcessor::DstProxyView& dstProxyView) override {
        SkASSERT(fDesc);

        GrGeometryProcessor* gp;

        {
            const GrBackendFormat& backendFormat =
                    fViewCountPairs[0].fProxy->backendFormat();

            GrSamplerState samplerState = GrSamplerState(GrSamplerState::WrapMode::kClamp,
                                                         fMetadata.filter());

            gp = GrQuadPerEdgeAA::MakeTexturedProcessor(
                    arena, fDesc->fVertexSpec, *caps->shaderCaps(), backendFormat, samplerState,
                    fMetadata.fSwizzle, std::move(fTextureColorSpaceXform), fMetadata.saturate());

            SkASSERT(fDesc->fVertexSpec.vertexSize() == gp->vertexStride());
        }

        auto pipelineFlags = (GrAAType::kMSAA == fMetadata.aaType()) ?
                GrPipeline::InputFlags::kHWAntialias : GrPipeline::InputFlags::kNone;

        fDesc->fProgramInfo = GrSimpleMeshDrawOpHelper::CreateProgramInfo(
                caps, arena, writeView, std::move(appliedClip), dstProxyView, gp,
                GrProcessorSet::MakeEmptySet(), fDesc->fVertexSpec.primitiveType(),
                pipelineFlags);
    }

    void onPrePrepareDraws(GrRecordingContext* context,
                           const GrSurfaceProxyView* writeView,
                           GrAppliedClip* clip,
                           const GrXferProcessor::DstProxyView& dstProxyView) override {
        TRACE_EVENT0("skia.gpu", TRACE_FUNC);

        SkDEBUGCODE(this->validate();)
        SkASSERT(!fDesc);

        SkArenaAlloc* arena = context->priv().recordTimeAllocator();

        fDesc = arena->make<Desc>();
        this->characterize(fDesc);
        fDesc->allocatePrePreparedVertices(arena);
        FillInVertices(*context->priv().caps(), this, fDesc, fDesc->fPrePreparedVertices);

        // This will call onCreateProgramInfo and register the created program with the DDL.
        this->INHERITED::onPrePrepareDraws(context, writeView, clip, dstProxyView);
    }

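    // Writes the tessellated vertex data for every op in the chain into 'vertexData'. This is
    // shared by the record-time path (writing into the pre-prepared arena allocation) and the
    // flush-time path (writing directly into the mapped vertex buffer).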
    static void FillInVertices(const GrCaps& caps, TextureOp* texOp, Desc* desc, char* vertexData) {
        SkASSERT(vertexData);

        int totQuadsSeen = 0;
        SkDEBUGCODE(int totVerticesSeen = 0;)
        SkDEBUGCODE(const size_t vertexSize = desc->fVertexSpec.vertexSize());

        GrQuadPerEdgeAA::Tessellator tessellator(desc->fVertexSpec, vertexData);
        for (const auto& op : ChainRange<TextureOp>(texOp)) {
            auto iter = op.fQuads.iterator();
            for (unsigned p = 0; p < op.fMetadata.fProxyCount; ++p) {
                const int quadCnt = op.fViewCountPairs[p].fQuadCnt;
                SkDEBUGCODE(int meshVertexCnt = quadCnt * desc->fVertexSpec.verticesPerQuad());

                for (int i = 0; i < quadCnt && iter.next(); ++i) {
                    SkASSERT(iter.isLocalValid());
                    const ColorSubsetAndAA& info = iter.metadata();

                    tessellator.append(iter.deviceQuad(), iter.localQuad(), info.fColor,
                                       info.fSubsetRect, info.aaFlags());
                }

                SkASSERT((totVerticesSeen + meshVertexCnt) * vertexSize
                         == (size_t)(tessellator.vertices() - vertexData));

                totQuadsSeen += quadCnt;
                SkDEBUGCODE(totVerticesSeen += meshVertexCnt);
                SkASSERT(totQuadsSeen * desc->fVertexSpec.verticesPerQuad() == totVerticesSeen);
            }

            // If quad counts per proxy were calculated correctly, the entire iterator
            // should have been consumed.
            SkASSERT(!iter.next());
        }

        SkASSERT(desc->totalSizeInBytes() == (size_t)(tessellator.vertices() - vertexData));
        SkASSERT(totQuadsSeen == desc->fNumTotalQuads);
        SkASSERT(totVerticesSeen == desc->totalNumVertices());
    }

#ifdef SK_DEBUG
    static int validate_op(GrTextureType textureType,
                           GrAAType aaType,
                           GrSwizzle swizzle,
                           const TextureOp* op) {
        SkASSERT(op->fMetadata.fSwizzle == swizzle);

        int quadCount = 0;
        for (unsigned p = 0; p < op->fMetadata.fProxyCount; ++p) {
            auto* proxy = op->fViewCountPairs[p].fProxy->asTextureProxy();
            quadCount += op->fViewCountPairs[p].fQuadCnt;
            SkASSERT(proxy);
            SkASSERT(proxy->textureType() == textureType);
        }

        SkASSERT(aaType == op->fMetadata.aaType());
        return quadCount;
    }

    void validate() const override {
        // NOTE: Since this is debug-only code, we use the virtual asTextureProxy()
        auto textureType = fViewCountPairs[0].fProxy->asTextureProxy()->textureType();
        GrAAType aaType = fMetadata.aaType();
        GrSwizzle swizzle = fMetadata.fSwizzle;

        int quadCount = validate_op(textureType, aaType, swizzle, this);

        for (const GrOp* tmp = this->prevInChain(); tmp; tmp = tmp->prevInChain()) {
            quadCount += validate_op(textureType, aaType, swizzle,
                                     static_cast<const TextureOp*>(tmp));
        }

        for (const GrOp* tmp = this->nextInChain(); tmp; tmp = tmp->nextInChain()) {
            quadCount += validate_op(textureType, aaType, swizzle,
                                     static_cast<const TextureOp*>(tmp));
        }

        SkASSERT(quadCount == this->numChainedQuads());
    }

#endif

#if GR_TEST_UTILS
    int numQuads() const final { return this->totNumQuads(); }
#endif

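    // Computes the combined VertexSpec (quad types, color type, subset, and AA) for this op and
    // every op chained with it, and fills in the Desc's proxy and quad totals.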
    void characterize(Desc* desc) const {
        SkDEBUGCODE(this->validate();)

        GrQuad::Type quadType = GrQuad::Type::kAxisAligned;
        ColorType colorType = ColorType::kNone;
        GrQuad::Type srcQuadType = GrQuad::Type::kAxisAligned;
        Subset subset = Subset::kNo;
        GrAAType overallAAType = fMetadata.aaType();

        desc->fNumProxies = 0;
        desc->fNumTotalQuads = 0;
        int maxQuadsPerMesh = 0;

        for (const auto& op : ChainRange<TextureOp>(this)) {
            if (op.fQuads.deviceQuadType() > quadType) {
                quadType = op.fQuads.deviceQuadType();
            }
            if (op.fQuads.localQuadType() > srcQuadType) {
                srcQuadType = op.fQuads.localQuadType();
            }
            if (op.fMetadata.subset() == Subset::kYes) {
                subset = Subset::kYes;
            }
            colorType = std::max(colorType, op.fMetadata.colorType());
            desc->fNumProxies += op.fMetadata.fProxyCount;

            for (unsigned p = 0; p < op.fMetadata.fProxyCount; ++p) {
                maxQuadsPerMesh = std::max(op.fViewCountPairs[p].fQuadCnt, maxQuadsPerMesh);
            }
            desc->fNumTotalQuads += op.totNumQuads();

            if (op.fMetadata.aaType() == GrAAType::kCoverage) {
                overallAAType = GrAAType::kCoverage;
            }
        }

        SkASSERT(desc->fNumTotalQuads == this->numChainedQuads());

        SkASSERT(!CombinedQuadCountWillOverflow(overallAAType, false, desc->fNumTotalQuads));

        auto indexBufferOption = GrQuadPerEdgeAA::CalcIndexBufferOption(overallAAType,
                                                                        maxQuadsPerMesh);

        desc->fVertexSpec = VertexSpec(quadType, colorType, srcQuadType, /* hasLocal */ true,
                                       subset, overallAAType, /* alpha as coverage */ true,
                                       indexBufferOption);

        SkASSERT(desc->fNumTotalQuads <= GrQuadPerEdgeAA::QuadLimit(indexBufferOption));
    }

    int totNumQuads() const {
#ifdef SK_DEBUG
        int tmp = 0;
        for (unsigned p = 0; p < fMetadata.fProxyCount; ++p) {
            tmp += fViewCountPairs[p].fQuadCnt;
        }
        SkASSERT(tmp == fMetadata.fTotalQuadCount);
#endif

        return fMetadata.fTotalQuadCount;
    }

    int numChainedQuads() const {
        int numChainedQuads = this->totNumQuads();

        for (const GrOp* tmp = this->prevInChain(); tmp; tmp = tmp->prevInChain()) {
            numChainedQuads += ((const TextureOp*)tmp)->totNumQuads();
        }

        for (const GrOp* tmp = this->nextInChain(); tmp; tmp = tmp->nextInChain()) {
            numChainedQuads += ((const TextureOp*)tmp)->totNumQuads();
        }

        return numChainedQuads;
    }

Robert Phillips29f38542019-10-16 09:20:25 -0400896 // onPrePrepareDraws may or may not have been called at this point
897 void onPrepareDraws(Target* target) override {
898 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
Greg Daniel7a82edf2018-12-04 10:54:34 -0500899
Robert Phillips29f38542019-10-16 09:20:25 -0400900 SkDEBUGCODE(this->validate();)
901
Chris Daltondbb833b2020-03-17 12:15:46 -0600902 SkASSERT(!fDesc || fDesc->fPrePreparedVertices);
Robert Phillips29f38542019-10-16 09:20:25 -0400903
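        // If this op was not recorded into a DDL, onPrePrepareDraws was never called, so
        // characterize the chain now and allocate the Desc from the target's arena.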
Chris Daltondbb833b2020-03-17 12:15:46 -0600904 if (!fDesc) {
Robert Phillips61fc7992019-10-22 11:58:17 -0400905 SkArenaAlloc* arena = target->allocator();
Chris Daltondbb833b2020-03-17 12:15:46 -0600906 fDesc = arena->make<Desc>();
907 this->characterize(fDesc);
908 SkASSERT(!fDesc->fPrePreparedVertices);
Brian Salomonf7232642018-09-19 08:58:08 -0400909 }
Brian Salomon92be2f72018-06-19 14:33:47 -0400910
Chris Daltondbb833b2020-03-17 12:15:46 -0600911 size_t vertexSize = fDesc->fVertexSpec.vertexSize();
Brian Salomon92be2f72018-06-19 14:33:47 -0400912
Chris Daltondbb833b2020-03-17 12:15:46 -0600913 void* vdata = target->makeVertexSpace(vertexSize, fDesc->totalNumVertices(),
914 &fDesc->fVertexBuffer, &fDesc->fBaseVertex);
Robert Phillipsbbd459d2019-10-29 14:40:03 -0400915 if (!vdata) {
916 SkDebugf("Could not allocate vertices\n");
917 return;
Brian Salomon34169692017-08-28 15:32:01 -0400918 }
Robert Phillipsbbd459d2019-10-29 14:40:03 -0400919
Chris Daltondbb833b2020-03-17 12:15:46 -0600920 if (fDesc->fVertexSpec.needsIndexBuffer()) {
921 fDesc->fIndexBuffer = GrQuadPerEdgeAA::GetIndexBuffer(
922 target, fDesc->fVertexSpec.indexBufferOption());
923 if (!fDesc->fIndexBuffer) {
Robert Phillipsfd0c3b52019-11-01 08:44:42 -0400924 SkDebugf("Could not allocate indices\n");
925 return;
926 }
927 }
928
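        // Either copy the vertex data generated at record time or fill the vertex buffer
        // directly from the quads.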
Chris Daltondbb833b2020-03-17 12:15:46 -0600929 if (fDesc->fPrePreparedVertices) {
930 memcpy(vdata, fDesc->fPrePreparedVertices, fDesc->totalSizeInBytes());
Robert Phillipsbbd459d2019-10-29 14:40:03 -0400931 } else {
Chris Daltondbb833b2020-03-17 12:15:46 -0600932 FillInVertices(target->caps(), this, fDesc, (char*) vdata);
Robert Phillipsbbd459d2019-10-29 14:40:03 -0400933 }
Chris Dalton07cdcfc92019-02-26 11:13:22 -0700934 }
935
936 void onExecute(GrOpFlushState* flushState, const SkRect& chainBounds) override {
Chris Daltondbb833b2020-03-17 12:15:46 -0600937 if (!fDesc->fVertexBuffer) {
938 return;
939 }
Robert Phillips3968fcb2019-12-05 16:40:31 -0500940
Chris Daltondbb833b2020-03-17 12:15:46 -0600941 if (fDesc->fVertexSpec.needsIndexBuffer() && !fDesc->fIndexBuffer) {
942 return;
943 }
Robert Phillips3968fcb2019-12-05 16:40:31 -0500944
Chris Daltondbb833b2020-03-17 12:15:46 -0600945 if (!fDesc->fProgramInfo) {
946 this->createProgramInfo(flushState);
947 SkASSERT(fDesc->fProgramInfo);
948 }
949
950 flushState->bindPipelineAndScissorClip(*fDesc->fProgramInfo, chainBounds);
Greg Daniel426274b2020-07-20 11:37:38 -0400951 flushState->bindBuffers(std::move(fDesc->fIndexBuffer), nullptr,
952 std::move(fDesc->fVertexBuffer));
Chris Daltondbb833b2020-03-17 12:15:46 -0600953
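        // Walk the op chain, binding each proxy run's texture and issuing one draw per run.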
954 int totQuadsSeen = 0;
955 SkDEBUGCODE(int numDraws = 0;)
956 for (const auto& op : ChainRange<TextureOp>(this)) {
957 for (unsigned p = 0; p < op.fMetadata.fProxyCount; ++p) {
958 const int quadCnt = op.fViewCountPairs[p].fQuadCnt;
959 SkASSERT(numDraws < fDesc->fNumProxies);
960 flushState->bindTextures(fDesc->fProgramInfo->primProc(),
961 *op.fViewCountPairs[p].fProxy,
962 fDesc->fProgramInfo->pipeline());
963 GrQuadPerEdgeAA::IssueDraw(flushState->caps(), flushState->opsRenderPass(),
964 fDesc->fVertexSpec, totQuadsSeen, quadCnt,
965 fDesc->totalNumVertices(), fDesc->fBaseVertex);
966 totQuadsSeen += quadCnt;
967 SkDEBUGCODE(++numDraws;)
968 }
969 }
970
971 SkASSERT(totQuadsSeen == fDesc->fNumTotalQuads);
972 SkASSERT(numDraws == fDesc->fNumProxies);
Brian Salomon34169692017-08-28 15:32:01 -0400973 }
974
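    // When a merge upgrades this op to coverage AA, the revised AA type must also be pushed to
    // every op already chained to it (in both directions) so the chain reports a consistent
    // AA type.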
Robert Phillips6bf11252020-07-31 12:15:00 -0400975 void propagateCoverageAAThroughoutChain() {
976 fMetadata.fAAType = static_cast<uint16_t>(GrAAType::kCoverage);
977
978 for (GrOp* tmp = this->prevInChain(); tmp; tmp = tmp->prevInChain()) {
979 TextureOp* tex = static_cast<TextureOp*>(tmp);
980 SkASSERT(tex->fMetadata.aaType() == GrAAType::kCoverage ||
981 tex->fMetadata.aaType() == GrAAType::kNone);
982 tex->fMetadata.fAAType = static_cast<uint16_t>(GrAAType::kCoverage);
983 }
984
985 for (GrOp* tmp = this->nextInChain(); tmp; tmp = tmp->nextInChain()) {
986 TextureOp* tex = static_cast<TextureOp*>(tmp);
987 SkASSERT(tex->fMetadata.aaType() == GrAAType::kCoverage ||
988 tex->fMetadata.aaType() == GrAAType::kNone);
989 tex->fMetadata.fAAType = static_cast<uint16_t>(GrAAType::kCoverage);
990 }
991 }
992
Michael Ludwig28b0c5d2019-12-19 14:51:00 -0500993 CombineResult onCombineIfPossible(GrOp* t, GrRecordingContext::Arenas*,
994 const GrCaps& caps) override {
Brian Salomon5f394272019-07-02 14:07:49 -0400995 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
Brian Salomon34169692017-08-28 15:32:01 -0400996 const auto* that = t->cast<TextureOp>();
Robert Phillips7327c9d2019-10-08 16:32:56 -0400997
Robert Phillips6bf11252020-07-31 12:15:00 -0400998 SkDEBUGCODE(this->validate();)
999 SkDEBUGCODE(that->validate();)
1000
Chris Daltondbb833b2020-03-17 12:15:46 -06001001 if (fDesc || that->fDesc) {
Robert Phillips7327c9d2019-10-08 16:32:56 -04001002            // This should never happen (since only DDL-recorded ops should be prePrepared)
1003            // but, in any case, we should never combine ops that have been prePrepared
1004 return CombineResult::kCannotCombine;
1005 }
1006
Brian Salomon2432d062020-04-16 20:48:09 -04001007 if (fMetadata.subset() != that->fMetadata.subset()) {
1008 // It is technically possible to combine operations across subset modes, but performance
Michael Ludwig2929f512019-04-19 13:05:56 -04001009 // testing suggests it's better to make more draw calls where some take advantage of
1010            // the more efficient shader path that avoids coordinate clamping.
1011 return CombineResult::kCannotCombine;
1012 }
Brian Osman3ebd3542018-07-30 14:36:53 -04001013 if (!GrColorSpaceXform::Equals(fTextureColorSpaceXform.get(),
1014 that->fTextureColorSpaceXform.get())) {
Brian Salomon7eae3e02018-08-07 14:02:38 +00001015 return CombineResult::kCannotCombine;
Brian Osman3ebd3542018-07-30 14:36:53 -04001016 }
Robert Phillipsb69001f2019-10-29 12:16:35 -04001017
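        // Differing AA types may still be mergeable if the combined op can be upgraded to
        // coverage AA.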
Brian Salomonae7d7702018-10-14 15:05:45 -04001018 bool upgradeToCoverageAAOnMerge = false;
Michael Ludwigadb12e72019-12-04 16:19:18 -05001019 if (fMetadata.aaType() != that->fMetadata.aaType()) {
1020 if (!CanUpgradeAAOnMerge(fMetadata.aaType(), that->fMetadata.aaType())) {
Brian Salomonae7d7702018-10-14 15:05:45 -04001021 return CombineResult::kCannotCombine;
1022 }
1023 upgradeToCoverageAAOnMerge = true;
Brian Salomonb5ef1f92018-01-11 11:46:21 -05001024 }
Robert Phillipsb69001f2019-10-29 12:16:35 -04001025
Michael Ludwigadb12e72019-12-04 16:19:18 -05001026 if (CombinedQuadCountWillOverflow(fMetadata.aaType(), upgradeToCoverageAAOnMerge,
Robert Phillipsbbd459d2019-10-29 14:40:03 -04001027 this->numChainedQuads() + that->numChainedQuads())) {
1028 return CombineResult::kCannotCombine;
Robert Phillipsb69001f2019-10-29 12:16:35 -04001029 }
1030
Michael Ludwigadb12e72019-12-04 16:19:18 -05001031 if (fMetadata.saturate() != that->fMetadata.saturate()) {
Brian Salomonf19f9ca2019-09-18 15:54:26 -04001032 return CombineResult::kCannotCombine;
1033 }
Michael Ludwigadb12e72019-12-04 16:19:18 -05001034 if (fMetadata.filter() != that->fMetadata.filter()) {
Brian Salomonf7232642018-09-19 08:58:08 -04001035 return CombineResult::kCannotCombine;
1036 }
Brian Salomone69b9ef2020-07-22 11:18:06 -04001037 if (fMetadata.mipmapMode() != that->fMetadata.mipmapMode()) {
1038 return CombineResult::kCannotCombine;
1039 }
Michael Ludwigadb12e72019-12-04 16:19:18 -05001040 if (fMetadata.fSwizzle != that->fMetadata.fSwizzle) {
1041 return CombineResult::kCannotCombine;
1042 }
1043 const auto* thisProxy = fViewCountPairs[0].fProxy.get();
1044 const auto* thatProxy = that->fViewCountPairs[0].fProxy.get();
1045 if (fMetadata.fProxyCount > 1 || that->fMetadata.fProxyCount > 1 ||
1046 thisProxy != thatProxy) {
Brian Salomon588cec72018-11-14 13:56:37 -05001047 // We can't merge across different proxies. Check if 'this' can be chained with 'that'.
Greg Daniel45723ac2018-11-30 10:12:43 -05001048 if (GrTextureProxy::ProxiesAreCompatibleAsDynamicState(thisProxy, thatProxy) &&
Robert Phillips6bf11252020-07-31 12:15:00 -04001049 caps.dynamicStateArrayGeometryProcessorTextureSupport() &&
1050 fMetadata.aaType() == that->fMetadata.aaType()) {
1051                // We only allow chaining when the aaTypes match because otherwise the AA type
1052 // reported by the chain can be inconsistent. That is, since chaining doesn't
1053 // propagate revised AA information throughout the chain, the head of the chain
1054 // could have an AA setting of kNone while the chain as a whole could have a
1055 // setting of kCoverage. This inconsistency would then interfere with the validity
1056 // of the CombinedQuadCountWillOverflow calls.
1057                // This problem doesn't occur with merging because we do propagate the AA information
1058 // (in propagateCoverageAAThroughoutChain) below.
Brian Salomonf7232642018-09-19 08:58:08 -04001059 return CombineResult::kMayChain;
1060 }
Brian Salomon7eae3e02018-08-07 14:02:38 +00001061 return CombineResult::kCannotCombine;
Brian Salomon336ce7b2017-09-08 08:23:58 -04001062 }
Michael Ludwig009b92e2019-02-15 16:03:53 -05001063
Brian Salomon2432d062020-04-16 20:48:09 -04001064 fMetadata.fSubset |= that->fMetadata.fSubset;
Brian Osman788b9162020-02-07 10:36:46 -05001065 fMetadata.fColorType = std::max(fMetadata.fColorType, that->fMetadata.fColorType);
Michael Ludwig009b92e2019-02-15 16:03:53 -05001066
Michael Ludwig425eb452019-06-27 10:13:27 -04001067 // Concatenate quad lists together
Michael Ludwig009b92e2019-02-15 16:03:53 -05001068 fQuads.concat(that->fQuads);
Greg Daniel549325c2019-10-30 16:19:20 -04001069 fViewCountPairs[0].fQuadCnt += that->fQuads.count();
Michael Ludwigadb12e72019-12-04 16:19:18 -05001070 fMetadata.fTotalQuadCount += that->fQuads.count();
Michael Ludwig009b92e2019-02-15 16:03:53 -05001071
Robert Phillips6bf11252020-07-31 12:15:00 -04001072 if (upgradeToCoverageAAOnMerge) {
1073 this->propagateCoverageAAThroughoutChain();
1074 }
1075
1076 SkDEBUGCODE(this->validate();)
1077
Brian Salomon7eae3e02018-08-07 14:02:38 +00001078 return CombineResult::kMerged;
Brian Salomon34169692017-08-28 15:32:01 -04001079 }
Brian Salomon2432d062020-04-16 20:48:09 -04001080 GrQuadBuffer<ColorSubsetAndAA> fQuads;
Brian Osman3ebd3542018-07-30 14:36:53 -04001081 sk_sp<GrColorSpaceXform> fTextureColorSpaceXform;
Chris Daltondbb833b2020-03-17 12:15:46 -06001082    // Most state of TextureOp is packed into these two fields to minimize the op's size.
Michael Ludwigadb12e72019-12-04 16:19:18 -05001083 // Historically, increasing the size of TextureOp has caused surprising perf regressions, so
1084 // consider/measure changes with care.
Chris Daltondbb833b2020-03-17 12:15:46 -06001085 Desc* fDesc;
Michael Ludwigadb12e72019-12-04 16:19:18 -05001086 Metadata fMetadata;
Robert Phillips32803ff2019-10-23 08:26:08 -04001087
1088 // This field must go last. When allocating this op, we will allocate extra space to hold
Greg Daniel549325c2019-10-30 16:19:20 -04001089 // additional ViewCountPairs immediately after the op's allocation so we can treat this
Robert Phillips32803ff2019-10-23 08:26:08 -04001090    // as an fProxyCount-length array.
Greg Daniel549325c2019-10-30 16:19:20 -04001091 ViewCountPair fViewCountPairs[1];
Brian Salomon336ce7b2017-09-08 08:23:58 -04001092
Brian Salomon34169692017-08-28 15:32:01 -04001093 typedef GrMeshDrawOp INHERITED;
1094};
1095
1096} // anonymous namespace
1097
Robert Phillipse837e612019-11-15 11:02:50 -05001098#if GR_TEST_UTILS
1099uint32_t GrTextureOp::ClassID() {
1100 return TextureOp::ClassID();
1101}
1102#endif
Brian Salomon34169692017-08-28 15:32:01 -04001103
Robert Phillipse837e612019-11-15 11:02:50 -05001104std::unique_ptr<GrDrawOp> GrTextureOp::Make(GrRecordingContext* context,
1105 GrSurfaceProxyView proxyView,
Brian Salomonfc118442019-11-22 19:09:27 -05001106 SkAlphaType alphaType,
Robert Phillipse837e612019-11-15 11:02:50 -05001107 sk_sp<GrColorSpaceXform> textureXform,
1108 GrSamplerState::Filter filter,
Brian Salomone69b9ef2020-07-22 11:18:06 -04001109 GrSamplerState::MipmapMode mm,
Robert Phillipse837e612019-11-15 11:02:50 -05001110 const SkPMColor4f& color,
1111 Saturate saturate,
1112 SkBlendMode blendMode,
1113 GrAAType aaType,
Michael Ludwig6b45c5d2020-02-07 09:56:38 -05001114 DrawQuad* quad,
Brian Salomon2432d062020-04-16 20:48:09 -04001115 const SkRect* subset) {
Michael Ludwig22429f92019-06-27 10:44:48 -04001116    // Apply optimizations that are valid whether GrTextureOp or GrFillRectOp is used
Brian Salomon2432d062020-04-16 20:48:09 -04001117 if (subset && subset->contains(proxyView.proxy()->backingStoreBoundsRect())) {
1118 // No need for a shader-based subset if hardware clamping achieves the same effect
1119 subset = nullptr;
Michael Ludwig22429f92019-06-27 10:44:48 -04001120 }
1121
Brian Salomone69b9ef2020-07-22 11:18:06 -04001122 if (filter != GrSamplerState::Filter::kNearest || mm != GrSamplerState::MipmapMode::kNone) {
1123 auto [mustFilter, mustMM] = filter_and_mm_have_effect(quad->fLocal, quad->fDevice);
1124 if (!mustFilter) {
1125 filter = GrSamplerState::Filter::kNearest;
1126 }
1127 if (!mustMM) {
1128 mm = GrSamplerState::MipmapMode::kNone;
1129 }
Michael Ludwig22429f92019-06-27 10:44:48 -04001130 }
1131
1132 if (blendMode == SkBlendMode::kSrcOver) {
Brian Salomone69b9ef2020-07-22 11:18:06 -04001133 return TextureOp::Make(context, std::move(proxyView), std::move(textureXform), filter, mm,
Brian Salomon2432d062020-04-16 20:48:09 -04001134 color, saturate, aaType, std::move(quad), subset);
Michael Ludwig22429f92019-06-27 10:44:48 -04001135 } else {
1136 // Emulate complex blending using GrFillRectOp
1137 GrPaint paint;
1138 paint.setColor4f(color);
1139 paint.setXPFactory(SkBlendMode_AsXPFactory(blendMode));
1140
1141 std::unique_ptr<GrFragmentProcessor> fp;
Brian Salomon2432d062020-04-16 20:48:09 -04001142 if (subset) {
Brian Salomonca6b2f42020-01-24 11:31:21 -05001143 const auto& caps = *context->priv().caps();
1144 SkRect localRect;
Michael Ludwig6b45c5d2020-02-07 09:56:38 -05001145 if (quad->fLocal.asRect(&localRect)) {
John Stiles5a2a7b32020-06-04 10:57:21 -04001146 fp = GrTextureEffect::MakeSubset(std::move(proxyView), alphaType, SkMatrix::I(),
1147 filter, *subset, localRect, caps);
Brian Salomonca6b2f42020-01-24 11:31:21 -05001148 } else {
John Stiles5a2a7b32020-06-04 10:57:21 -04001149 fp = GrTextureEffect::MakeSubset(std::move(proxyView), alphaType, SkMatrix::I(),
1150 filter, *subset, caps);
Brian Salomonca6b2f42020-01-24 11:31:21 -05001151 }
1152 } else {
Greg Danield2ccbb52020-02-05 10:45:39 -05001153 fp = GrTextureEffect::Make(std::move(proxyView), alphaType, SkMatrix::I(), filter);
Michael Ludwig22429f92019-06-27 10:44:48 -04001154 }
1155 fp = GrColorSpaceXformEffect::Make(std::move(fp), std::move(textureXform));
Brian Osman958a3bb2020-07-30 14:13:23 -04001156 fp = GrBlendFragmentProcessor::Make(std::move(fp), nullptr, SkBlendMode::kModulate);
Brian Salomonf19f9ca2019-09-18 15:54:26 -04001157 if (saturate == GrTextureOp::Saturate::kYes) {
John Stiles5a2a7b32020-06-04 10:57:21 -04001158 fp = GrClampFragmentProcessor::Make(std::move(fp), /*clampToPremul=*/false);
Brian Salomonf19f9ca2019-09-18 15:54:26 -04001159 }
John Stiles5933d7d2020-07-21 12:28:35 -04001160 paint.setColorFragmentProcessor(std::move(fp));
Michael Ludwig6b45c5d2020-02-07 09:56:38 -05001161 return GrFillRectOp::Make(context, std::move(paint), aaType, quad);
Michael Ludwig22429f92019-06-27 10:44:48 -04001162 }
1163}
1164
Robert Phillipse837e612019-11-15 11:02:50 -05001165// A helper class that assists in breaking up bulk API quad draws into manageable chunks.
1166class GrTextureOp::BatchSizeLimiter {
1167public:
1168 BatchSizeLimiter(GrRenderTargetContext* rtc,
Michael Ludwig7c12e282020-05-29 09:54:07 -04001169 const GrClip* clip,
Robert Phillipse837e612019-11-15 11:02:50 -05001170 GrRecordingContext* context,
1171 int numEntries,
1172 GrSamplerState::Filter filter,
Brian Salomone69b9ef2020-07-22 11:18:06 -04001173 GrSamplerState::MipmapMode mm,
Robert Phillipse837e612019-11-15 11:02:50 -05001174 GrTextureOp::Saturate saturate,
1175 SkCanvas::SrcRectConstraint constraint,
1176 const SkMatrix& viewMatrix,
1177 sk_sp<GrColorSpaceXform> textureColorSpaceXform)
1178 : fRTC(rtc)
1179 , fClip(clip)
1180 , fContext(context)
1181 , fFilter(filter)
Brian Salomone69b9ef2020-07-22 11:18:06 -04001182 , fMipmapMode(mm)
Robert Phillipse837e612019-11-15 11:02:50 -05001183 , fSaturate(saturate)
1184 , fConstraint(constraint)
1185 , fViewMatrix(viewMatrix)
1186 , fTextureColorSpaceXform(textureColorSpaceXform)
Brian Salomone69b9ef2020-07-22 11:18:06 -04001187 , fNumLeft(numEntries) {}
Brian Salomon34169692017-08-28 15:32:01 -04001188
Michael Ludwigadb12e72019-12-04 16:19:18 -05001189 void createOp(GrRenderTargetContext::TextureSetEntry set[],
Robert Phillipse837e612019-11-15 11:02:50 -05001190 int clumpSize,
1191 GrAAType aaType) {
Michael Ludwig379e4962019-12-06 13:21:26 -05001192 int clumpProxyCount = proxy_run_count(&set[fNumClumped], clumpSize);
Brian Salomone69b9ef2020-07-22 11:18:06 -04001193 std::unique_ptr<GrDrawOp> op = TextureOp::Make(fContext,
1194 &set[fNumClumped],
1195 clumpSize,
1196 clumpProxyCount,
1197 fFilter,
1198 fMipmapMode,
1199 fSaturate,
1200 aaType,
1201 fConstraint,
1202 fViewMatrix,
Robert Phillipse837e612019-11-15 11:02:50 -05001203 fTextureColorSpaceXform);
1204 fRTC->addDrawOp(fClip, std::move(op));
1205
1206 fNumLeft -= clumpSize;
1207 fNumClumped += clumpSize;
1208 }
1209
1210 int numLeft() const { return fNumLeft; }
1211 int baseIndex() const { return fNumClumped; }
1212
1213private:
1214 GrRenderTargetContext* fRTC;
Michael Ludwig7c12e282020-05-29 09:54:07 -04001215 const GrClip* fClip;
Robert Phillipse837e612019-11-15 11:02:50 -05001216 GrRecordingContext* fContext;
1217 GrSamplerState::Filter fFilter;
Brian Salomone69b9ef2020-07-22 11:18:06 -04001218 GrSamplerState::MipmapMode fMipmapMode;
Robert Phillipse837e612019-11-15 11:02:50 -05001219 GrTextureOp::Saturate fSaturate;
1220 SkCanvas::SrcRectConstraint fConstraint;
1221 const SkMatrix& fViewMatrix;
1222 sk_sp<GrColorSpaceXform> fTextureColorSpaceXform;
1223
1224 int fNumLeft;
1225 int fNumClumped = 0; // also the offset for the start of the next clump
1226};
1227
1228// Greedily clump quad draws together until the index buffer limit is exceeded.
Michael Ludwigfe13ca32019-11-21 10:26:41 -05001229void GrTextureOp::AddTextureSetOps(GrRenderTargetContext* rtc,
Michael Ludwig7c12e282020-05-29 09:54:07 -04001230 const GrClip* clip,
Michael Ludwigfe13ca32019-11-21 10:26:41 -05001231 GrRecordingContext* context,
Michael Ludwigadb12e72019-12-04 16:19:18 -05001232 GrRenderTargetContext::TextureSetEntry set[],
Michael Ludwigfe13ca32019-11-21 10:26:41 -05001233 int cnt,
Michael Ludwig379e4962019-12-06 13:21:26 -05001234 int proxyRunCnt,
Michael Ludwigfe13ca32019-11-21 10:26:41 -05001235 GrSamplerState::Filter filter,
Brian Salomone69b9ef2020-07-22 11:18:06 -04001236 GrSamplerState::MipmapMode mm,
Michael Ludwigfe13ca32019-11-21 10:26:41 -05001237 Saturate saturate,
1238 SkBlendMode blendMode,
1239 GrAAType aaType,
1240 SkCanvas::SrcRectConstraint constraint,
1241 const SkMatrix& viewMatrix,
1242 sk_sp<GrColorSpaceXform> textureColorSpaceXform) {
Michael Ludwig379e4962019-12-06 13:21:26 -05001243 // Ensure that the index buffer limits are lower than the proxy and quad count limits of
1244 // the op's metadata so we don't need to worry about overflow.
Michael Ludwig4ef1ca12019-12-19 10:58:52 -05001245 SkDEBUGCODE(TextureOp::ValidateResourceLimits();)
Michael Ludwig379e4962019-12-06 13:21:26 -05001246 SkASSERT(proxy_run_count(set, cnt) == proxyRunCnt);
1247
Michael Ludwigfe13ca32019-11-21 10:26:41 -05001248 // First check if we can support batches as a single op
1249 if (blendMode != SkBlendMode::kSrcOver ||
1250 !context->priv().caps()->dynamicStateArrayGeometryProcessorTextureSupport()) {
1251 // Append each entry as its own op; these may still be GrTextureOps if the blend mode is
1252 // src-over but the backend doesn't support dynamic state changes. Otherwise Make()
1253 // automatically creates the appropriate GrFillRectOp to emulate GrTextureOp.
1254 SkMatrix ctm;
1255 for (int i = 0; i < cnt; ++i) {
Michael Ludwigfe13ca32019-11-21 10:26:41 -05001256 ctm = viewMatrix;
1257 if (set[i].fPreViewMatrix) {
1258 ctm.preConcat(*set[i].fPreViewMatrix);
1259 }
Robert Phillipse837e612019-11-15 11:02:50 -05001260
Michael Ludwig6b45c5d2020-02-07 09:56:38 -05001261 DrawQuad quad;
1262 quad.fEdgeFlags = set[i].fAAFlags;
Michael Ludwigfe13ca32019-11-21 10:26:41 -05001263 if (set[i].fDstClipQuad) {
Michael Ludwig6b45c5d2020-02-07 09:56:38 -05001264 quad.fDevice = GrQuad::MakeFromSkQuad(set[i].fDstClipQuad, ctm);
Michael Ludwigfe13ca32019-11-21 10:26:41 -05001265
1266 SkPoint srcPts[4];
1267 GrMapRectPoints(set[i].fDstRect, set[i].fSrcRect, set[i].fDstClipQuad, srcPts, 4);
Michael Ludwig6b45c5d2020-02-07 09:56:38 -05001268 quad.fLocal = GrQuad::MakeFromSkQuad(srcPts, SkMatrix::I());
Michael Ludwigfe13ca32019-11-21 10:26:41 -05001269 } else {
Michael Ludwig6b45c5d2020-02-07 09:56:38 -05001270 quad.fDevice = GrQuad::MakeFromRect(set[i].fDstRect, ctm);
1271 quad.fLocal = GrQuad(set[i].fSrcRect);
Michael Ludwigfe13ca32019-11-21 10:26:41 -05001272 }
1273
Brian Salomon2432d062020-04-16 20:48:09 -04001274 const SkRect* subset = constraint == SkCanvas::kStrict_SrcRectConstraint
Michael Ludwigfe13ca32019-11-21 10:26:41 -05001275 ? &set[i].fSrcRect : nullptr;
1276
Brian Salomonfc118442019-11-22 19:09:27 -05001277 auto op = Make(context, set[i].fProxyView, set[i].fSrcAlphaType, textureColorSpaceXform,
Brian Salomone69b9ef2020-07-22 11:18:06 -04001278 filter, mm, set[i].fColor, saturate, blendMode, aaType, &quad, subset);
Michael Ludwigfe13ca32019-11-21 10:26:41 -05001279 rtc->addDrawOp(clip, std::move(op));
1280 }
1281 return;
1282 }
1283
1284 // Second check if we can always just make a single op and avoid the extra iteration
Robert Phillipse837e612019-11-15 11:02:50 -05001285 // needed to clump things together.
Brian Osman788b9162020-02-07 10:36:46 -05001286 if (cnt <= std::min(GrResourceProvider::MaxNumNonAAQuads(),
Robert Phillipse837e612019-11-15 11:02:50 -05001287 GrResourceProvider::MaxNumAAQuads())) {
Brian Salomone69b9ef2020-07-22 11:18:06 -04001288 auto op = TextureOp::Make(context, set, cnt, proxyRunCnt, filter, mm, saturate, aaType,
Robert Phillipse837e612019-11-15 11:02:50 -05001289 constraint, viewMatrix, std::move(textureColorSpaceXform));
1290 rtc->addDrawOp(clip, std::move(op));
1291 return;
1292 }
1293
Brian Salomone69b9ef2020-07-22 11:18:06 -04001294 BatchSizeLimiter state(rtc, clip, context, cnt, filter, mm, saturate, constraint, viewMatrix,
Robert Phillipse837e612019-11-15 11:02:50 -05001295 std::move(textureColorSpaceXform));
1296
1297 // kNone and kMSAA never get altered
1298 if (aaType == GrAAType::kNone || aaType == GrAAType::kMSAA) {
1299 // Clump these into series of MaxNumNonAAQuads-sized GrTextureOps
1300 while (state.numLeft() > 0) {
Brian Osman788b9162020-02-07 10:36:46 -05001301 int clumpSize = std::min(state.numLeft(), GrResourceProvider::MaxNumNonAAQuads());
Robert Phillipse837e612019-11-15 11:02:50 -05001302
1303 state.createOp(set, clumpSize, aaType);
1304 }
1305 } else {
1306 // kCoverage can be downgraded to kNone. Note that the following is conservative. kCoverage
1307 // can also get downgraded to kNone if all the quads are on integer coordinates and
1308 // axis-aligned.
1309 SkASSERT(aaType == GrAAType::kCoverage);
1310
1311 while (state.numLeft() > 0) {
1312 GrAAType runningAA = GrAAType::kNone;
1313 bool clumped = false;
1314
1315 for (int i = 0; i < state.numLeft(); ++i) {
1316 int absIndex = state.baseIndex() + i;
1317
Robert Phillips6bf11252020-07-31 12:15:00 -04001318 if (set[absIndex].fAAFlags != GrQuadAAFlags::kNone ||
1319 runningAA == GrAAType::kCoverage) {
Robert Phillipse837e612019-11-15 11:02:50 -05001320
1321 if (i >= GrResourceProvider::MaxNumAAQuads()) {
1322                        // Here either we need to boost the AA type to kCoverage but doing so
1323                        // with all the accumulated quads would overflow, or the set of AA quads
1324                        // has simply grown too large. In either case, calve off the existing
1325                        // quads as their own TextureOp.
1326 state.createOp(
1327 set,
1328 runningAA == GrAAType::kNone ? i : GrResourceProvider::MaxNumAAQuads(),
1329 runningAA); // maybe downgrading AA here
1330 clumped = true;
1331 break;
1332 }
1333
1334 runningAA = GrAAType::kCoverage;
1335 } else if (runningAA == GrAAType::kNone) {
1336
1337 if (i >= GrResourceProvider::MaxNumNonAAQuads()) {
1338 // Here we've found a consistent batch of non-AA quads that has gotten too
1339 // large. Calve it off as its own GrTextureOp.
1340 state.createOp(set, GrResourceProvider::MaxNumNonAAQuads(),
1341 GrAAType::kNone); // definitely downgrading AA here
1342 clumped = true;
1343 break;
1344 }
1345 }
1346 }
1347
1348 if (!clumped) {
1349                // We ran through the above loop without hitting a limit. Spit out this last clump of
1350 // quads and call it a day.
1351 state.createOp(set, state.numLeft(), runningAA); // maybe downgrading AA here
1352 }
1353 }
1354 }
1355}
Robert Phillipsae01f622019-11-13 15:56:31 +00001356
Brian Salomon34169692017-08-28 15:32:01 -04001357#if GR_TEST_UTILS
Robert Phillipsb7bfbc22020-07-01 12:55:01 -04001358#include "include/gpu/GrRecordingContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -05001359#include "src/gpu/GrProxyProvider.h"
1360#include "src/gpu/GrRecordingContextPriv.h"
Brian Salomon34169692017-08-28 15:32:01 -04001361
1362GR_DRAW_OP_TEST_DEFINE(TextureOp) {
Brian Salomona56a7462020-02-07 14:17:25 -05001363 SkISize dims;
1364 dims.fHeight = random->nextULessThan(90) + 10;
1365 dims.fWidth = random->nextULessThan(90) + 10;
Brian Salomon2a4f9832018-03-03 22:43:43 -05001366 auto origin = random->nextBool() ? kTopLeft_GrSurfaceOrigin : kBottomLeft_GrSurfaceOrigin;
Brian Salomon7e67dca2020-07-21 09:27:25 -04001367 GrMipmapped mipMapped = random->nextBool() ? GrMipmapped::kYes : GrMipmapped::kNo;
Greg Daniel09c94002018-06-08 22:11:51 +00001368 SkBackingFit fit = SkBackingFit::kExact;
Brian Salomon7e67dca2020-07-21 09:27:25 -04001369 if (mipMapped == GrMipmapped::kNo) {
Greg Daniel09c94002018-06-08 22:11:51 +00001370 fit = random->nextBool() ? SkBackingFit::kApprox : SkBackingFit::kExact;
1371 }
Greg Daniel4065d452018-11-16 15:43:41 -05001372 const GrBackendFormat format =
Robert Phillips0a15cc62019-07-30 12:49:10 -04001373 context->priv().caps()->getDefaultBackendFormat(GrColorType::kRGBA_8888,
1374 GrRenderable::kNo);
Robert Phillips9da87e02019-02-04 13:26:26 -05001375 GrProxyProvider* proxyProvider = context->priv().proxyProvider();
Brian Salomone8a766b2019-07-19 14:24:36 -04001376 sk_sp<GrTextureProxy> proxy = proxyProvider->createProxy(
Brian Salomondf1bd6d2020-03-26 20:37:01 -04001377 format, dims, GrRenderable::kNo, 1, mipMapped, fit, SkBudgeted::kNo, GrProtected::kNo,
1378 GrInternalSurfaceFlags::kNone);
Robert Phillips0bd24dc2018-01-16 08:06:32 -05001379
Brian Salomon34169692017-08-28 15:32:01 -04001380 SkRect rect = GrTest::TestRect(random);
1381 SkRect srcRect;
1382 srcRect.fLeft = random->nextRangeScalar(0.f, proxy->width() / 2.f);
1383 srcRect.fRight = random->nextRangeScalar(0.f, proxy->width()) + proxy->width() / 2.f;
1384 srcRect.fTop = random->nextRangeScalar(0.f, proxy->height() / 2.f);
1385 srcRect.fBottom = random->nextRangeScalar(0.f, proxy->height()) + proxy->height() / 2.f;
1386 SkMatrix viewMatrix = GrTest::TestMatrixPreservesRightAngles(random);
Brian Osman3d139a42018-11-19 10:42:10 -05001387 SkPMColor4f color = SkPMColor4f::FromBytes_RGBA(SkColorToPremulGrColor(random->nextU()));
Brian Salomon2bbdcc42017-09-07 12:36:34 -04001388 GrSamplerState::Filter filter = (GrSamplerState::Filter)random->nextULessThan(
Brian Salomone69b9ef2020-07-22 11:18:06 -04001389 static_cast<uint32_t>(GrSamplerState::Filter::kLast) + 1);
1390 GrSamplerState::MipmapMode mm = GrSamplerState::MipmapMode::kNone;
1391 if (mipMapped == GrMipmapped::kYes) {
1392 mm = (GrSamplerState::MipmapMode)random->nextULessThan(
1393 static_cast<uint32_t>(GrSamplerState::MipmapMode::kLast) + 1);
Greg Daniel09c94002018-06-08 22:11:51 +00001394 }
Brian Salomone69b9ef2020-07-22 11:18:06 -04001395
Brian Osman3ebd3542018-07-30 14:36:53 -04001396 auto texXform = GrTest::TestColorXform(random);
Brian Salomon485b8c62018-01-12 15:11:06 -05001397 GrAAType aaType = GrAAType::kNone;
1398 if (random->nextBool()) {
Chris Dalton6ce447a2019-06-23 18:07:38 -06001399 aaType = (numSamples > 1) ? GrAAType::kMSAA : GrAAType::kCoverage;
Brian Salomon485b8c62018-01-12 15:11:06 -05001400 }
Brian Salomon2213ee92018-10-02 10:44:21 -04001401 GrQuadAAFlags aaFlags = GrQuadAAFlags::kNone;
1402 aaFlags |= random->nextBool() ? GrQuadAAFlags::kLeft : GrQuadAAFlags::kNone;
1403 aaFlags |= random->nextBool() ? GrQuadAAFlags::kTop : GrQuadAAFlags::kNone;
1404 aaFlags |= random->nextBool() ? GrQuadAAFlags::kRight : GrQuadAAFlags::kNone;
1405 aaFlags |= random->nextBool() ? GrQuadAAFlags::kBottom : GrQuadAAFlags::kNone;
Brian Salomon2432d062020-04-16 20:48:09 -04001406 bool useSubset = random->nextBool();
Brian Salomonf19f9ca2019-09-18 15:54:26 -04001407 auto saturate = random->nextBool() ? GrTextureOp::Saturate::kYes : GrTextureOp::Saturate::kNo;
Greg Daniel549325c2019-10-30 16:19:20 -04001408 GrSurfaceProxyView proxyView(
1409 std::move(proxy), origin,
Greg Daniel14b57212019-12-17 16:18:06 -05001410 context->priv().caps()->getReadSwizzle(format, GrColorType::kRGBA_8888));
Brian Salomonfc118442019-11-22 19:09:27 -05001411 auto alphaType = static_cast<SkAlphaType>(
1412 random->nextRangeU(kUnknown_SkAlphaType + 1, kLastEnum_SkAlphaType));
Greg Daniel549325c2019-10-30 16:19:20 -04001413
Michael Ludwig6b45c5d2020-02-07 09:56:38 -05001414 DrawQuad quad = {GrQuad::MakeFromRect(rect, viewMatrix), GrQuad(srcRect), aaFlags};
Brian Salomonfc118442019-11-22 19:09:27 -05001415 return GrTextureOp::Make(context, std::move(proxyView), alphaType, std::move(texXform), filter,
Brian Salomone69b9ef2020-07-22 11:18:06 -04001416 mm, color, saturate, SkBlendMode::kSrcOver, aaType, &quad,
1417 useSubset ? &srcRect : nullptr);
Brian Salomon34169692017-08-28 15:32:01 -04001418}
1419
1420#endif