blob: b2270c78328b6713a003bf0ab9fc75823cdf0dc2 [file] [log] [blame]
robertphillips@google.comf4c2c522012-04-27 12:08:47 +00001/*
2 * Copyright 2012 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Robert Phillips17dc6582021-08-17 11:57:31 -04008#include "src/gpu/ops/SoftwarePathRenderer.h"
Brian Salomon99a813c2020-03-02 12:50:47 -05009
Robert Phillipsb7bfbc22020-07-01 12:55:01 -040010#include "include/gpu/GrDirectContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050011#include "include/private/SkSemaphore.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050012#include "src/core/SkTaskGroup.h"
13#include "src/core/SkTraceEvent.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040014#include "src/gpu/GrAuditTrail.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050015#include "src/gpu/GrCaps.h"
16#include "src/gpu/GrClip.h"
Adlai Holler9e2c50e2021-02-09 14:41:52 -050017#include "src/gpu/GrDeferredProxyUploader.h"
Adlai Hollera0693042020-10-14 11:23:11 -040018#include "src/gpu/GrDirectContextPriv.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050019#include "src/gpu/GrGpuResourcePriv.h"
20#include "src/gpu/GrOpFlushState.h"
21#include "src/gpu/GrProxyProvider.h"
22#include "src/gpu/GrRecordingContextPriv.h"
23#include "src/gpu/GrSWMaskHelper.h"
Robert Phillips62214f72021-06-15 10:12:51 -040024#include "src/gpu/GrUtil.h"
Brian Salomon99a813c2020-03-02 12:50:47 -050025#include "src/gpu/SkGr.h"
Robert Phillips550de7f2021-07-06 16:28:52 -040026#include "src/gpu/effects/GrTextureEffect.h"
Michael Ludwig2686d692020-04-17 20:21:37 +000027#include "src/gpu/geometry/GrStyledShape.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050028#include "src/gpu/ops/GrDrawOp.h"
Robert Phillips4dca8312021-07-28 15:13:20 -040029#include "src/gpu/v1/SurfaceDrawContext_v1.h"
robertphillips@google.comf4c2c522012-04-27 12:08:47 +000030
Robert Phillips17dc6582021-08-17 11:57:31 -040031namespace {
robertphillips@google.comf4c2c522012-04-27 12:08:47 +000032
/**
 * Payload class for use with GrTDeferredProxyUploader. The software path renderer only draws
 * a single path into the mask texture. This stores all of the information needed by the worker
 * thread's call to drawShape (see below, in onDrawPath). Everything is stored by value so the
 * worker thread has stable copies independent of the caller's stack.
 */
class SoftwarePathData {
public:
    SoftwarePathData(const SkIRect& maskBounds, const SkMatrix& viewMatrix,
                     const GrStyledShape& shape, GrAA aa)
            : fMaskBounds(maskBounds)
            , fViewMatrix(viewMatrix)
            , fShape(shape)
            , fAA(aa) {}

    // Device-space bounds of the mask to be rendered.
    const SkIRect& getMaskBounds() const { return fMaskBounds; }
    const SkMatrix* getViewMatrix() const { return &fViewMatrix; }
    const GrStyledShape& getShape() const { return fShape; }
    GrAA getAA() const { return fAA; }

private:
    SkIRect fMaskBounds;
    SkMatrix fViewMatrix;
    GrStyledShape fShape;
    GrAA fAA;
};
58
59bool get_unclipped_shape_dev_bounds(const GrStyledShape& shape, const SkMatrix& matrix,
60 SkIRect* devBounds) {
bsalomon39ef7fb2016-09-21 11:16:05 -070061 SkRect shapeBounds = shape.styledBounds();
62 if (shapeBounds.isEmpty()) {
63 return false;
64 }
65 SkRect shapeDevBounds;
66 matrix.mapRect(&shapeDevBounds, shapeBounds);
Brian Salomonc1c607e2016-12-20 11:41:43 -050067 // Even though these are "unclipped" bounds we still clip to the int32_t range.
68 // This is the largest int32_t that is representable exactly as a float. The next 63 larger ints
69 // would round down to this value when cast to a float, but who really cares.
70 // INT32_MIN is exactly representable.
71 static constexpr int32_t kMaxInt = 2147483520;
72 if (!shapeDevBounds.intersect(SkRect::MakeLTRB(INT32_MIN, INT32_MIN, kMaxInt, kMaxInt))) {
73 return false;
74 }
Jim Van Verthba7cf292017-11-02 20:18:56 +000075 // Make sure that the resulting SkIRect can have representable width and height
76 if (SkScalarRoundToInt(shapeDevBounds.width()) > kMaxInt ||
77 SkScalarRoundToInt(shapeDevBounds.height()) > kMaxInt) {
78 return false;
79 }
bsalomon39ef7fb2016-09-21 11:16:05 -070080 shapeDevBounds.roundOut(devBounds);
81 return true;
82}
83
Robert Phillips17dc6582021-08-17 11:57:31 -040084GrSurfaceProxyView make_deferred_mask_texture_view(GrRecordingContext* rContext,
85 SkBackingFit fit,
86 SkISize dimensions) {
87 GrProxyProvider* proxyProvider = rContext->priv().proxyProvider();
88 const GrCaps* caps = rContext->priv().caps();
89
90 const GrBackendFormat format = caps->getDefaultBackendFormat(GrColorType::kAlpha_8,
91 GrRenderable::kNo);
92
93 GrSwizzle swizzle = caps->getReadSwizzle(format, GrColorType::kAlpha_8);
94
95 auto proxy =
96 proxyProvider->createProxy(format, dimensions, GrRenderable::kNo, 1, GrMipmapped::kNo,
97 fit, SkBudgeted::kYes, GrProtected::kNo);
98 return {std::move(proxy), kTopLeft_GrSurfaceOrigin, swizzle};
99}
100
101
102} // anonymous namespace
103
104namespace skgpu::v1 {
105
106////////////////////////////////////////////////////////////////////////////////
Robert Phillipsdb0ec082021-08-19 12:30:12 -0400107PathRenderer::CanDrawPath SoftwarePathRenderer::onCanDrawPath(const CanDrawPathArgs& args) const {
Robert Phillips17dc6582021-08-17 11:57:31 -0400108 // Pass on any style that applies. The caller will apply the style if a suitable renderer is
109 // not found and try again with the new GrStyledShape.
110 if (!args.fShape->style().applies() && SkToBool(fProxyProvider) &&
111 (args.fAAType == GrAAType::kCoverage || args.fAAType == GrAAType::kNone)) {
112 // This is the fallback renderer for when a path is too complicated for the GPU ones.
113 return CanDrawPath::kAsBackup;
114 }
115 return CanDrawPath::kNo;
116}
117
118////////////////////////////////////////////////////////////////////////////////
119
bsalomon39ef7fb2016-09-21 11:16:05 -0700120// Gets the shape bounds, the clip bounds, and the intersection (if any). Returns false if there
121// is no intersection.
Robert Phillips17dc6582021-08-17 11:57:31 -0400122bool SoftwarePathRenderer::GetShapeAndClipBounds(SurfaceDrawContext* sdc,
123 const GrClip* clip,
124 const GrStyledShape& shape,
125 const SkMatrix& matrix,
126 SkIRect* unclippedDevShapeBounds,
127 SkIRect* clippedDevShapeBounds,
128 SkIRect* devClipBounds) {
robertphillips@google.comed4155d2012-05-01 14:30:24 +0000129 // compute bounds as intersection of rt size, clip, and path
Michael Ludwige06a8972020-06-11 10:29:00 -0400130 *devClipBounds = clip ? clip->getConservativeBounds()
Robert Phillips4dca8312021-07-28 15:13:20 -0400131 : SkIRect::MakeWH(sdc->width(), sdc->height());
robertphillips@google.com7b112892012-07-31 15:18:21 +0000132
bsalomon39ef7fb2016-09-21 11:16:05 -0700133 if (!get_unclipped_shape_dev_bounds(shape, matrix, unclippedDevShapeBounds)) {
Brian Salomon44207f32020-01-06 15:20:18 -0500134 *unclippedDevShapeBounds = SkIRect::MakeEmpty();
135 *clippedDevShapeBounds = SkIRect::MakeEmpty();
robertphillips@google.com3e11c0b2012-07-11 18:20:35 +0000136 return false;
robertphillips@google.comed4155d2012-05-01 14:30:24 +0000137 }
bsalomon39ef7fb2016-09-21 11:16:05 -0700138 if (!clippedDevShapeBounds->intersect(*devClipBounds, *unclippedDevShapeBounds)) {
Brian Salomon44207f32020-01-06 15:20:18 -0500139 *clippedDevShapeBounds = SkIRect::MakeEmpty();
robertphillips@google.comed4155d2012-05-01 14:30:24 +0000140 return false;
141 }
142 return true;
143}
144
145////////////////////////////////////////////////////////////////////////////////
robertphillips976f5f02016-06-03 10:59:20 -0700146
// Draws a non-antialiased rect with the given paint and stencil settings; localMatrix maps the
// rect's device-space geometry back into local (paint) space.
void SoftwarePathRenderer::DrawNonAARect(SurfaceDrawContext* sdc,
                                         GrPaint&& paint,
                                         const GrUserStencilSettings& userStencilSettings,
                                         const GrClip* clip,
                                         const SkMatrix& viewMatrix,
                                         const SkRect& rect,
                                         const SkMatrix& localMatrix) {
    sdc->stencilRect(clip, &userStencilSettings, std::move(paint), GrAA::kNo,
                     viewMatrix, rect, &localMatrix);
}
157
Robert Phillips17dc6582021-08-17 11:57:31 -0400158void SoftwarePathRenderer::DrawAroundInvPath(SurfaceDrawContext* sdc,
159 GrPaint&& paint,
160 const GrUserStencilSettings& userStencilSettings,
161 const GrClip* clip,
162 const SkMatrix& viewMatrix,
163 const SkIRect& devClipBounds,
164 const SkIRect& devPathBounds) {
joshualittd27f73e2014-12-29 07:43:36 -0800165 SkMatrix invert;
joshualitt8059eb92014-12-29 15:10:07 -0800166 if (!viewMatrix.invert(&invert)) {
bsalomon@google.come3d32162012-07-20 13:37:06 +0000167 return;
168 }
joshualittd27f73e2014-12-29 07:43:36 -0800169
commit-bot@chromium.orgfd03d4a2013-07-17 21:39:42 +0000170 SkRect rect;
robertphillips@google.com7b112892012-07-31 15:18:21 +0000171 if (devClipBounds.fTop < devPathBounds.fTop) {
Mike Reed92b33352019-08-24 19:39:13 -0400172 rect.setLTRB(SkIntToScalar(devClipBounds.fLeft), SkIntToScalar(devClipBounds.fTop),
173 SkIntToScalar(devClipBounds.fRight), SkIntToScalar(devPathBounds.fTop));
Robert Phillips4dca8312021-07-28 15:13:20 -0400174 DrawNonAARect(sdc, GrPaint::Clone(paint), userStencilSettings, clip,
Brian Salomonb74ef032017-08-10 12:46:01 -0400175 SkMatrix::I(), rect, invert);
robertphillips@google.comed4155d2012-05-01 14:30:24 +0000176 }
robertphillips@google.com7b112892012-07-31 15:18:21 +0000177 if (devClipBounds.fLeft < devPathBounds.fLeft) {
Mike Reed92b33352019-08-24 19:39:13 -0400178 rect.setLTRB(SkIntToScalar(devClipBounds.fLeft), SkIntToScalar(devPathBounds.fTop),
179 SkIntToScalar(devPathBounds.fLeft), SkIntToScalar(devPathBounds.fBottom));
Robert Phillips4dca8312021-07-28 15:13:20 -0400180 DrawNonAARect(sdc, GrPaint::Clone(paint), userStencilSettings, clip,
Brian Salomonb74ef032017-08-10 12:46:01 -0400181 SkMatrix::I(), rect, invert);
robertphillips@google.comed4155d2012-05-01 14:30:24 +0000182 }
robertphillips@google.com7b112892012-07-31 15:18:21 +0000183 if (devClipBounds.fRight > devPathBounds.fRight) {
Mike Reed92b33352019-08-24 19:39:13 -0400184 rect.setLTRB(SkIntToScalar(devPathBounds.fRight), SkIntToScalar(devPathBounds.fTop),
185 SkIntToScalar(devClipBounds.fRight), SkIntToScalar(devPathBounds.fBottom));
Robert Phillips4dca8312021-07-28 15:13:20 -0400186 DrawNonAARect(sdc, GrPaint::Clone(paint), userStencilSettings, clip,
Brian Salomonb74ef032017-08-10 12:46:01 -0400187 SkMatrix::I(), rect, invert);
robertphillips@google.comed4155d2012-05-01 14:30:24 +0000188 }
robertphillips@google.com7b112892012-07-31 15:18:21 +0000189 if (devClipBounds.fBottom > devPathBounds.fBottom) {
Mike Reed92b33352019-08-24 19:39:13 -0400190 rect.setLTRB(SkIntToScalar(devClipBounds.fLeft), SkIntToScalar(devPathBounds.fBottom),
191 SkIntToScalar(devClipBounds.fRight), SkIntToScalar(devClipBounds.fBottom));
Robert Phillips4dca8312021-07-28 15:13:20 -0400192 DrawNonAARect(sdc, std::move(paint), userStencilSettings, clip,
robertphillips976f5f02016-06-03 10:59:20 -0700193 SkMatrix::I(), rect, invert);
robertphillips@google.comed4155d2012-05-01 14:30:24 +0000194 }
195}
196
Robert Phillips17dc6582021-08-17 11:57:31 -0400197void SoftwarePathRenderer::DrawToTargetWithShapeMask(
Greg Daniel9f0dfbd2020-02-10 11:47:11 -0500198 GrSurfaceProxyView view,
Robert Phillips17dc6582021-08-17 11:57:31 -0400199 SurfaceDrawContext* sdc,
Brian Osmanc7da1462017-08-17 16:14:25 -0400200 GrPaint&& paint,
201 const GrUserStencilSettings& userStencilSettings,
Michael Ludwig7c12e282020-05-29 09:54:07 -0400202 const GrClip* clip,
Brian Osmanc7da1462017-08-17 16:14:25 -0400203 const SkMatrix& viewMatrix,
204 const SkIPoint& textureOriginInDeviceSpace,
205 const SkIRect& deviceSpaceRectToDraw) {
206 SkMatrix invert;
207 if (!viewMatrix.invert(&invert)) {
208 return;
209 }
210
Brian Salomonb43d6992021-01-05 14:37:40 -0500211 view.concatSwizzle(GrSwizzle("aaaa"));
212
Brian Osmanc7da1462017-08-17 16:14:25 -0400213 SkRect dstRect = SkRect::Make(deviceSpaceRectToDraw);
214
215 // We use device coords to compute the texture coordinates. We take the device coords and apply
216 // a translation so that the top-left of the device bounds maps to 0,0, and then a scaling
217 // matrix to normalized coords.
Mike Reed1f607332020-05-21 12:11:27 -0400218 SkMatrix maskMatrix = SkMatrix::Translate(SkIntToScalar(-textureOriginInDeviceSpace.fX),
Brian Osmanc7da1462017-08-17 16:14:25 -0400219 SkIntToScalar(-textureOriginInDeviceSpace.fY));
220 maskMatrix.preConcat(viewMatrix);
Greg Danield2ccbb52020-02-05 10:45:39 -0500221
John Stiles41d91b62020-07-21 14:39:40 -0400222 paint.setCoverageFragmentProcessor(GrTextureEffect::Make(
Greg Danield2ccbb52020-02-05 10:45:39 -0500223 std::move(view), kPremul_SkAlphaType, maskMatrix, GrSamplerState::Filter::kNearest));
Robert Phillips4dca8312021-07-28 15:13:20 -0400224 DrawNonAARect(sdc, std::move(paint), userStencilSettings, clip, SkMatrix::I(),
Brian Osmanf9810662017-08-30 10:02:10 -0400225 dstRect, invert);
226}
227
////////////////////////////////////////////////////////////////////////////////
// Renders the path by rasterizing it into an alpha mask in software (possibly on a worker
// thread, possibly cached by unique key) and then drawing the mask as a textured rect.
// Returns true on success; false on failure.
bool SoftwarePathRenderer::onDrawPath(const DrawPathArgs& args) {
    GR_AUDIT_TRAIL_AUTO_FRAME(args.fContext->priv().auditTrail(),
                              "SoftwarePathRenderer::onDrawPath");

    if (!fProxyProvider) {
        return false;
    }

    SkASSERT(!args.fShape->style().applies());
    // We really need to know if the shape will be inverse filled or not
    // If the path is hairline, ignore inverse fill.
    bool inverseFilled = args.fShape->inverseFilled() &&
                         !GrIsStrokeHairlineOrEquivalent(args.fShape->style(),
                                                         *args.fViewMatrix, nullptr);

    SkIRect unclippedDevShapeBounds, clippedDevShapeBounds, devClipBounds;
    // To prevent overloading the cache with entries during animations we limit the cache of masks
    // to cases where the matrix preserves axis alignment.
    bool useCache = fAllowCaching && !inverseFilled && args.fViewMatrix->preservesAxisAlignment() &&
                    args.fShape->hasUnstyledKey() && (GrAAType::kCoverage == args.fAAType);

    if (!GetShapeAndClipBounds(args.fSurfaceDrawContext,
                               args.fClip, *args.fShape,
                               *args.fViewMatrix, &unclippedDevShapeBounds,
                               &clippedDevShapeBounds,
                               &devClipBounds)) {
        // Shape and clip don't intersect: nothing to draw for a regular fill; an inverse fill
        // still needs the area around the (empty) path covered.
        if (inverseFilled) {
            DrawAroundInvPath(args.fSurfaceDrawContext, std::move(args.fPaint),
                              *args.fUserStencilSettings, args.fClip, *args.fViewMatrix,
                              devClipBounds, unclippedDevShapeBounds);
        }
        return true;
    }

    const SkIRect* boundsForMask = &clippedDevShapeBounds;
    if (useCache) {
        // Use the cache only if >50% of the path is visible.
        int unclippedWidth = unclippedDevShapeBounds.width();
        int unclippedHeight = unclippedDevShapeBounds.height();
        int64_t unclippedArea = sk_64_mul(unclippedWidth, unclippedHeight);
        int64_t clippedArea = sk_64_mul(clippedDevShapeBounds.width(),
                                        clippedDevShapeBounds.height());
        int maxTextureSize = args.fSurfaceDrawContext->caps()->maxTextureSize();
        if (unclippedArea > 2 * clippedArea || unclippedWidth > maxTextureSize ||
            unclippedHeight > maxTextureSize) {
            useCache = false;
        } else {
            // A cached mask covers the full unclipped shape so it can be reused under other clips.
            boundsForMask = &unclippedDevShapeBounds;
        }
    }

    // Build the cache key from the mask dimensions, the upper-left 2x2 of the matrix, the
    // fractional translation, the hairline style bits, and the shape's unstyled key.
    GrUniqueKey maskKey;
    if (useCache) {
        // We require the upper left 2x2 of the matrix to match exactly for a cache hit.
        SkScalar sx = args.fViewMatrix->get(SkMatrix::kMScaleX);
        SkScalar sy = args.fViewMatrix->get(SkMatrix::kMScaleY);
        SkScalar kx = args.fViewMatrix->get(SkMatrix::kMSkewX);
        SkScalar ky = args.fViewMatrix->get(SkMatrix::kMSkewY);
        static const GrUniqueKey::Domain kDomain = GrUniqueKey::GenerateDomain();
        GrUniqueKey::Builder builder(&maskKey, kDomain, 7 + args.fShape->unstyledKeySize(),
                                     "SW Path Mask");
        builder[0] = boundsForMask->width();
        builder[1] = boundsForMask->height();

#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
        // Fractional translate does not affect caching on Android. This is done for better cache
        // hit ratio and speed, but it is matching HWUI behavior, which doesn't consider the matrix
        // at all when caching paths.
        SkFixed fracX = 0;
        SkFixed fracY = 0;
#else
        SkScalar tx = args.fViewMatrix->get(SkMatrix::kMTransX);
        SkScalar ty = args.fViewMatrix->get(SkMatrix::kMTransY);
        // Allow 8 bits each in x and y of subpixel positioning.
        SkFixed fracX = SkScalarToFixed(SkScalarFraction(tx)) & 0x0000FF00;
        SkFixed fracY = SkScalarToFixed(SkScalarFraction(ty)) & 0x0000FF00;
#endif
        builder[2] = SkFloat2Bits(sx);
        builder[3] = SkFloat2Bits(sy);
        builder[4] = SkFloat2Bits(kx);
        builder[5] = SkFloat2Bits(ky);
        // Distinguish between hairline and filled paths. For hairlines, we also need to include
        // the cap. (SW grows hairlines by 0.5 pixel with round and square caps). Note that
        // stroke-and-fill of hairlines is turned into pure fill by SkStrokeRec, so this covers
        // all cases we might see.
        uint32_t styleBits = args.fShape->style().isSimpleHairline() ?
                             ((args.fShape->style().strokeRec().getCap() << 1) | 1) : 0;
        builder[6] = fracX | (fracY >> 8) | (styleBits << 16);
        args.fShape->writeUnstyledKey(&builder[7]);
    }

    // Try to find a previously rendered mask in the cache.
    GrSurfaceProxyView view;
    if (useCache) {
        sk_sp<GrTextureProxy> proxy = fProxyProvider->findOrCreateProxyByUniqueKey(maskKey);
        if (proxy) {
            GrSwizzle swizzle = args.fSurfaceDrawContext->caps()->getReadSwizzle(
                    proxy->backendFormat(), GrColorType::kAlpha_8);
            view = {std::move(proxy), kTopLeft_GrSurfaceOrigin, swizzle};
            args.fContext->priv().stats()->incNumPathMasksCacheHits();
        }
    }
    // Cache miss (or caching disabled): rasterize the mask, on a worker thread if a task group
    // is available, otherwise synchronously on this thread.
    if (!view) {
        SkBackingFit fit = useCache ? SkBackingFit::kExact : SkBackingFit::kApprox;
        GrAA aa = GrAA(GrAAType::kCoverage == args.fAAType);

        SkTaskGroup* taskGroup = nullptr;
        if (auto direct = args.fContext->asDirectContext()) {
            taskGroup = direct->priv().getTaskGroup();
        }

        if (taskGroup) {
            // Deferred path: create an uninstantiated proxy now and schedule the software
            // rasterization on the task group; the uploader supplies the pixels at flush time.
            view = make_deferred_mask_texture_view(args.fContext, fit, boundsForMask->size());
            if (!view) {
                return false;
            }

            auto uploader = std::make_unique<GrTDeferredProxyUploader<SoftwarePathData>>(
                    *boundsForMask, *args.fViewMatrix, *args.fShape, aa);
            GrTDeferredProxyUploader<SoftwarePathData>* uploaderRaw = uploader.get();

            auto drawAndUploadMask = [uploaderRaw] {
                TRACE_EVENT0("skia.gpu", "Threaded SW Mask Render");
                GrSWMaskHelper helper(uploaderRaw->getPixels());
                if (helper.init(uploaderRaw->data().getMaskBounds())) {
                    helper.drawShape(uploaderRaw->data().getShape(),
                                     *uploaderRaw->data().getViewMatrix(),
                                     SkRegion::kReplace_Op, uploaderRaw->data().getAA(), 0xFF);
                } else {
                    SkDEBUGFAIL("Unable to allocate SW mask.");
                }
                uploaderRaw->signalAndFreeData();
            };
            taskGroup->add(std::move(drawAndUploadMask));
            view.asTextureProxy()->texPriv().setDeferredUploader(std::move(uploader));
        } else {
            // Synchronous path: rasterize the mask right now and wrap it in a texture view.
            GrSWMaskHelper helper;
            if (!helper.init(*boundsForMask)) {
                return false;
            }
            helper.drawShape(*args.fShape, *args.fViewMatrix, SkRegion::kReplace_Op, aa, 0xFF);
            view = helper.toTextureView(args.fContext, fit);
        }

        if (!view) {
            return false;
        }
        if (useCache) {
            SkASSERT(view.origin() == kTopLeft_GrSurfaceOrigin);

            // We will add an invalidator to the path so that if the path goes away we will
            // delete or recycle the mask texture.
            auto listener = GrMakeUniqueKeyInvalidationListener(&maskKey,
                                                                args.fContext->priv().contextID());
            fProxyProvider->assignUniqueKeyToProxy(maskKey, view.asTextureProxy());
            args.fShape->addGenIDChangeListener(std::move(listener));
        }

        args.fContext->priv().stats()->incNumPathMasksGenerated();
    }
    SkASSERT(view);
    // For inverse fills, cover the clip area outside the mask's bounds first, then draw the
    // masked rect itself.
    if (inverseFilled) {
        DrawAroundInvPath(args.fSurfaceDrawContext, GrPaint::Clone(args.fPaint),
                          *args.fUserStencilSettings, args.fClip, *args.fViewMatrix, devClipBounds,
                          unclippedDevShapeBounds);
    }
    DrawToTargetWithShapeMask(std::move(view), args.fSurfaceDrawContext, std::move(args.fPaint),
                              *args.fUserStencilSettings, args.fClip, *args.fViewMatrix,
                              SkIPoint{boundsForMask->fLeft, boundsForMask->fTop}, *boundsForMask);

    return true;
}
Robert Phillips17dc6582021-08-17 11:57:31 -0400401
402} // namespace skgpu::v1