blob: 37433ff0c4970778148c18d3bf188422b5fd8d81 [file] [log] [blame]
robertphillips@google.comf4c2c522012-04-27 12:08:47 +00001/*
2 * Copyright 2012 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8#include "GrSoftwarePathRenderer.h"
robertphillips976f5f02016-06-03 10:59:20 -07009#include "GrAuditTrail.h"
10#include "GrClip.h"
Brian Osmanf9810662017-08-30 10:02:10 -040011#include "GrContextPriv.h"
bsalomon39ef7fb2016-09-21 11:16:05 -070012#include "GrGpuResourcePriv.h"
Brian Osmanf9810662017-08-30 10:02:10 -040013#include "GrOpFlushState.h"
14#include "GrOpList.h"
Brian Osman32342f02017-03-04 08:12:46 -050015#include "GrResourceProvider.h"
robertphillips@google.com58b20212012-06-27 20:44:52 +000016#include "GrSWMaskHelper.h"
Brian Osmanf9810662017-08-30 10:02:10 -040017#include "SkMakeUnique.h"
18#include "SkSemaphore.h"
19#include "SkTaskGroup.h"
20#include "SkTraceEvent.h"
Robert Phillips009e9af2017-06-15 14:01:04 -040021#include "ops/GrDrawOp.h"
Brian Salomonbaaf4392017-06-15 09:59:23 -040022#include "ops/GrRectOpFactory.h"
robertphillips@google.comf4c2c522012-04-27 12:08:47 +000023
robertphillips@google.comed4155d2012-05-01 14:30:24 +000024////////////////////////////////////////////////////////////////////////////////
Chris Dalton5ed44232017-09-07 13:22:46 -060025GrPathRenderer::CanDrawPath
26GrSoftwarePathRenderer::onCanDrawPath(const CanDrawPathArgs& args) const {
bsalomon8acedde2016-06-24 10:42:16 -070027 // Pass on any style that applies. The caller will apply the style if a suitable renderer is
28 // not found and try again with the new GrShape.
Chris Dalton5ed44232017-09-07 13:22:46 -060029 if (!args.fShape->style().applies() && SkToBool(fResourceProvider) &&
30 (args.fAAType == GrAAType::kCoverage || args.fAAType == GrAAType::kNone)) {
31 // This is the fallback renderer for when a path is too complicated for the GPU ones.
32 return CanDrawPath::kAsBackup;
33 }
34 return CanDrawPath::kNo;
robertphillips@google.comf4c2c522012-04-27 12:08:47 +000035}
36
robertphillips@google.comed4155d2012-05-01 14:30:24 +000037////////////////////////////////////////////////////////////////////////////////
bsalomon39ef7fb2016-09-21 11:16:05 -070038static bool get_unclipped_shape_dev_bounds(const GrShape& shape, const SkMatrix& matrix,
39 SkIRect* devBounds) {
40 SkRect shapeBounds = shape.styledBounds();
41 if (shapeBounds.isEmpty()) {
42 return false;
43 }
44 SkRect shapeDevBounds;
45 matrix.mapRect(&shapeDevBounds, shapeBounds);
Brian Salomonc1c607e2016-12-20 11:41:43 -050046 // Even though these are "unclipped" bounds we still clip to the int32_t range.
47 // This is the largest int32_t that is representable exactly as a float. The next 63 larger ints
48 // would round down to this value when cast to a float, but who really cares.
49 // INT32_MIN is exactly representable.
50 static constexpr int32_t kMaxInt = 2147483520;
51 if (!shapeDevBounds.intersect(SkRect::MakeLTRB(INT32_MIN, INT32_MIN, kMaxInt, kMaxInt))) {
52 return false;
53 }
bsalomon39ef7fb2016-09-21 11:16:05 -070054 shapeDevBounds.roundOut(devBounds);
55 return true;
56}
57
// Gets the shape bounds, the clip bounds, and the intersection (if any). Returns false if there
// is no intersection.
// NOTE: 'devClipBounds' is always written, even on a false return — onDrawPath relies on it to
// draw around an inverse-filled path whose bounds are off screen. The two shape-bounds outputs
// are set to the empty rect on failure.
static bool get_shape_and_clip_bounds(int width, int height,
                                      const GrClip& clip,
                                      const GrShape& shape,
                                      const SkMatrix& matrix,
                                      SkIRect* unclippedDevShapeBounds,
                                      SkIRect* clippedDevShapeBounds,
                                      SkIRect* devClipBounds) {
    // compute bounds as intersection of rt size, clip, and path
    clip.getConservativeBounds(width, height, devClipBounds);

    // Shape bounds are empty or entirely outside the representable device range.
    if (!get_unclipped_shape_dev_bounds(shape, matrix, unclippedDevShapeBounds)) {
        *unclippedDevShapeBounds = SkIRect::EmptyIRect();
        *clippedDevShapeBounds = SkIRect::EmptyIRect();
        return false;
    }
    // Shape bounds do not overlap the conservative clip bounds — nothing visible.
    if (!clippedDevShapeBounds->intersect(*devClipBounds, *unclippedDevShapeBounds)) {
        *clippedDevShapeBounds = SkIRect::EmptyIRect();
        return false;
    }
    return true;
}
81
82////////////////////////////////////////////////////////////////////////////////
robertphillips976f5f02016-06-03 10:59:20 -070083
Brian Osman11052242016-10-27 14:47:55 -040084void GrSoftwarePathRenderer::DrawNonAARect(GrRenderTargetContext* renderTargetContext,
Brian Salomon82f44312017-01-11 13:42:54 -050085 GrPaint&& paint,
robertphillipsd2b6d642016-07-21 08:55:08 -070086 const GrUserStencilSettings& userStencilSettings,
robertphillips976f5f02016-06-03 10:59:20 -070087 const GrClip& clip,
robertphillips976f5f02016-06-03 10:59:20 -070088 const SkMatrix& viewMatrix,
89 const SkRect& rect,
90 const SkMatrix& localMatrix) {
Brian Salomonbaaf4392017-06-15 09:59:23 -040091 renderTargetContext->addDrawOp(clip,
92 GrRectOpFactory::MakeNonAAFillWithLocalMatrix(
93 std::move(paint), viewMatrix, localMatrix, rect,
94 GrAAType::kNone, &userStencilSettings));
robertphillips976f5f02016-06-03 10:59:20 -070095}
96
Brian Osman11052242016-10-27 14:47:55 -040097void GrSoftwarePathRenderer::DrawAroundInvPath(GrRenderTargetContext* renderTargetContext,
Brian Salomon82f44312017-01-11 13:42:54 -050098 GrPaint&& paint,
robertphillipsd2b6d642016-07-21 08:55:08 -070099 const GrUserStencilSettings& userStencilSettings,
robertphillips976f5f02016-06-03 10:59:20 -0700100 const GrClip& clip,
robertphillips976f5f02016-06-03 10:59:20 -0700101 const SkMatrix& viewMatrix,
102 const SkIRect& devClipBounds,
103 const SkIRect& devPathBounds) {
joshualittd27f73e2014-12-29 07:43:36 -0800104 SkMatrix invert;
joshualitt8059eb92014-12-29 15:10:07 -0800105 if (!viewMatrix.invert(&invert)) {
bsalomon@google.come3d32162012-07-20 13:37:06 +0000106 return;
107 }
joshualittd27f73e2014-12-29 07:43:36 -0800108
commit-bot@chromium.orgfd03d4a2013-07-17 21:39:42 +0000109 SkRect rect;
robertphillips@google.com7b112892012-07-31 15:18:21 +0000110 if (devClipBounds.fTop < devPathBounds.fTop) {
rmistry@google.comd6176b02012-08-23 18:14:13 +0000111 rect.iset(devClipBounds.fLeft, devClipBounds.fTop,
robertphillips@google.com7b112892012-07-31 15:18:21 +0000112 devClipBounds.fRight, devPathBounds.fTop);
Brian Salomonb74ef032017-08-10 12:46:01 -0400113 DrawNonAARect(renderTargetContext, GrPaint::Clone(paint), userStencilSettings, clip,
114 SkMatrix::I(), rect, invert);
robertphillips@google.comed4155d2012-05-01 14:30:24 +0000115 }
robertphillips@google.com7b112892012-07-31 15:18:21 +0000116 if (devClipBounds.fLeft < devPathBounds.fLeft) {
rmistry@google.comd6176b02012-08-23 18:14:13 +0000117 rect.iset(devClipBounds.fLeft, devPathBounds.fTop,
robertphillips@google.com7b112892012-07-31 15:18:21 +0000118 devPathBounds.fLeft, devPathBounds.fBottom);
Brian Salomonb74ef032017-08-10 12:46:01 -0400119 DrawNonAARect(renderTargetContext, GrPaint::Clone(paint), userStencilSettings, clip,
120 SkMatrix::I(), rect, invert);
robertphillips@google.comed4155d2012-05-01 14:30:24 +0000121 }
robertphillips@google.com7b112892012-07-31 15:18:21 +0000122 if (devClipBounds.fRight > devPathBounds.fRight) {
rmistry@google.comd6176b02012-08-23 18:14:13 +0000123 rect.iset(devPathBounds.fRight, devPathBounds.fTop,
robertphillips@google.com7b112892012-07-31 15:18:21 +0000124 devClipBounds.fRight, devPathBounds.fBottom);
Brian Salomonb74ef032017-08-10 12:46:01 -0400125 DrawNonAARect(renderTargetContext, GrPaint::Clone(paint), userStencilSettings, clip,
126 SkMatrix::I(), rect, invert);
robertphillips@google.comed4155d2012-05-01 14:30:24 +0000127 }
robertphillips@google.com7b112892012-07-31 15:18:21 +0000128 if (devClipBounds.fBottom > devPathBounds.fBottom) {
rmistry@google.comd6176b02012-08-23 18:14:13 +0000129 rect.iset(devClipBounds.fLeft, devPathBounds.fBottom,
robertphillips@google.com7b112892012-07-31 15:18:21 +0000130 devClipBounds.fRight, devClipBounds.fBottom);
Brian Salomon82f44312017-01-11 13:42:54 -0500131 DrawNonAARect(renderTargetContext, std::move(paint), userStencilSettings, clip,
robertphillips976f5f02016-06-03 10:59:20 -0700132 SkMatrix::I(), rect, invert);
robertphillips@google.comed4155d2012-05-01 14:30:24 +0000133 }
134}
135
Brian Osmanc7da1462017-08-17 16:14:25 -0400136void GrSoftwarePathRenderer::DrawToTargetWithShapeMask(
137 sk_sp<GrTextureProxy> proxy,
138 GrRenderTargetContext* renderTargetContext,
139 GrPaint&& paint,
140 const GrUserStencilSettings& userStencilSettings,
141 const GrClip& clip,
142 const SkMatrix& viewMatrix,
143 const SkIPoint& textureOriginInDeviceSpace,
144 const SkIRect& deviceSpaceRectToDraw) {
145 SkMatrix invert;
146 if (!viewMatrix.invert(&invert)) {
147 return;
148 }
149
150 SkRect dstRect = SkRect::Make(deviceSpaceRectToDraw);
151
152 // We use device coords to compute the texture coordinates. We take the device coords and apply
153 // a translation so that the top-left of the device bounds maps to 0,0, and then a scaling
154 // matrix to normalized coords.
155 SkMatrix maskMatrix = SkMatrix::MakeTrans(SkIntToScalar(-textureOriginInDeviceSpace.fX),
156 SkIntToScalar(-textureOriginInDeviceSpace.fY));
157 maskMatrix.preConcat(viewMatrix);
158 paint.addCoverageFragmentProcessor(GrSimpleTextureEffect::Make(
Brian Salomon2bbdcc42017-09-07 12:36:34 -0400159 std::move(proxy), nullptr, maskMatrix, GrSamplerState::Filter::kNearest));
Brian Osmanf9810662017-08-30 10:02:10 -0400160 DrawNonAARect(renderTargetContext, std::move(paint), userStencilSettings, clip, SkMatrix::I(),
161 dstRect, invert);
162}
163
164static sk_sp<GrTextureProxy> make_deferred_mask_texture_proxy(GrContext* context, SkBackingFit fit,
165 int width, int height) {
166 GrSurfaceDesc desc;
167 desc.fOrigin = kTopLeft_GrSurfaceOrigin;
168 desc.fWidth = width;
169 desc.fHeight = height;
170 desc.fConfig = kAlpha_8_GrPixelConfig;
171
172 sk_sp<GrSurfaceContext> sContext =
173 context->contextPriv().makeDeferredSurfaceContext(desc, fit, SkBudgeted::kYes);
174 if (!sContext || !sContext->asTextureProxy()) {
175 return nullptr;
176 }
177 return sContext->asTextureProxyRef();
178}
179
namespace {

// Bridges a worker-thread software rasterization to a GPU texture upload at flush time.
// Protocol: the creating code hands getPixels()/getSemaphore() to a worker task, which renders
// the mask into fPixels and then signals fPixelsReady. When the owning op list prepares,
// operator() schedules an ASAP upload that waits on the semaphore and copies the pixels into
// fProxy. The destructor also waits if the upload never ran, so the worker never signals a
// destroyed semaphore.
class GrMaskUploaderPrepareCallback : public GrPrepareCallback {
public:
    GrMaskUploaderPrepareCallback(sk_sp<GrTextureProxy> proxy, const SkIRect& maskBounds,
                                  const SkMatrix& viewMatrix, const GrShape& shape, GrAA aa)
            : fProxy(std::move(proxy))
            , fMaskBounds(maskBounds)
            , fViewMatrix(viewMatrix)
            , fShape(shape)
            , fAA(aa)
            , fWaited(false) {}

    ~GrMaskUploaderPrepareCallback() override {
        if (!fWaited) {
            // This can happen if our owning op list fails to instantiate (so it never prepares)
            fPixelsReady.wait();
        }
    }

    // Invoked at flush/prepare time; enqueues the actual pixel upload.
    void operator()(GrOpFlushState* flushState) override {
        TRACE_EVENT0("skia", "Mask Uploader Pre Flush Callback");
        auto uploadMask = [this](GrDrawOp::WritePixelsFn& writePixelsFn) {
            TRACE_EVENT0("skia", "Mask Upload");
            // Block until the worker thread has finished rasterizing the mask.
            this->fPixelsReady.wait();
            this->fWaited = true;
            // If the worker thread was unable to allocate pixels, this check will fail, and we'll
            // end up drawing with an uninitialized mask texture, but at least we won't crash.
            if (this->fPixels.addr()) {
                writePixelsFn(this->fProxy.get(), 0, 0,
                              this->fPixels.width(), this->fPixels.height(),
                              kAlpha_8_GrPixelConfig,
                              this->fPixels.addr(), this->fPixels.rowBytes());
                // Free this memory immediately, so it can be recycled. This avoids memory pressure
                // when there is a large amount of threaded work still running during flush.
                this->fPixels.reset();
            }
        };
        flushState->addASAPUpload(std::move(uploadMask));
    }

    // Accessors handed to the worker task (pixels + semaphore) and used by the draw lambda.
    SkAutoPixmapStorage* getPixels() { return &fPixels; }
    SkSemaphore* getSemaphore() { return &fPixelsReady; }
    const SkIRect& getMaskBounds() const { return fMaskBounds; }
    const SkMatrix* getViewMatrix() const { return &fViewMatrix; }
    const GrShape& getShape() const { return fShape; }
    GrAA getAA() const { return fAA; }

private:
    // NOTE: This ref cnt isn't thread safe!
    sk_sp<GrTextureProxy> fProxy;
    SkAutoPixmapStorage fPixels;     // mask storage, filled by the worker thread
    SkSemaphore fPixelsReady;        // signaled by the worker when fPixels is complete

    // Inputs captured by value/copy so the worker can run after the caller's frame is gone.
    SkIRect fMaskBounds;
    SkMatrix fViewMatrix;
    GrShape fShape;
    GrAA fAA;
    bool fWaited;                    // true once the upload lambda has consumed the semaphore
};

}
242
robertphillips@google.comed4155d2012-05-01 14:30:24 +0000243////////////////////////////////////////////////////////////////////////////////
244// return true on success; false on failure
bsalomon0aff2fa2015-07-31 06:48:27 -0700245bool GrSoftwarePathRenderer::onDrawPath(const DrawPathArgs& args) {
Brian Osman11052242016-10-27 14:47:55 -0400246 GR_AUDIT_TRAIL_AUTO_FRAME(args.fRenderTargetContext->auditTrail(),
robertphillips976f5f02016-06-03 10:59:20 -0700247 "GrSoftwarePathRenderer::onDrawPath");
Brian Osman32342f02017-03-04 08:12:46 -0500248 if (!fResourceProvider) {
robertphillips@google.comed4155d2012-05-01 14:30:24 +0000249 return false;
250 }
251
bsalomon8acedde2016-06-24 10:42:16 -0700252 // We really need to know if the shape will be inverse filled or not
253 bool inverseFilled = false;
254 SkTLazy<GrShape> tmpShape;
caryclarkd6562002016-07-27 12:02:07 -0700255 SkASSERT(!args.fShape->style().applies());
Eric Karl5c779752017-05-08 12:02:07 -0700256 // If the path is hairline, ignore inverse fill.
257 inverseFilled = args.fShape->inverseFilled() &&
258 !IsStrokeHairlineOrEquivalent(args.fShape->style(), *args.fViewMatrix, nullptr);
bsalomon8acedde2016-06-24 10:42:16 -0700259
bsalomon39ef7fb2016-09-21 11:16:05 -0700260 SkIRect unclippedDevShapeBounds, clippedDevShapeBounds, devClipBounds;
261 // To prevent overloading the cache with entries during animations we limit the cache of masks
262 // to cases where the matrix preserves axis alignment.
263 bool useCache = fAllowCaching && !inverseFilled && args.fViewMatrix->preservesAxisAlignment() &&
Brian Salomon0e8fc8b2016-12-09 15:10:07 -0500264 args.fShape->hasUnstyledKey() && GrAAType::kCoverage == args.fAAType;
bsalomon39ef7fb2016-09-21 11:16:05 -0700265
Brian Osman11052242016-10-27 14:47:55 -0400266 if (!get_shape_and_clip_bounds(args.fRenderTargetContext->width(),
267 args.fRenderTargetContext->height(),
bsalomon8acedde2016-06-24 10:42:16 -0700268 *args.fClip, *args.fShape,
bsalomon39ef7fb2016-09-21 11:16:05 -0700269 *args.fViewMatrix, &unclippedDevShapeBounds,
270 &clippedDevShapeBounds,
271 &devClipBounds)) {
bsalomon8acedde2016-06-24 10:42:16 -0700272 if (inverseFilled) {
Brian Salomon82f44312017-01-11 13:42:54 -0500273 DrawAroundInvPath(args.fRenderTargetContext, std::move(args.fPaint),
274 *args.fUserStencilSettings, *args.fClip, *args.fViewMatrix,
275 devClipBounds, unclippedDevShapeBounds);
bsalomon@google.com276c1fa2012-06-19 13:22:45 +0000276 }
277 return true;
robertphillips@google.comed4155d2012-05-01 14:30:24 +0000278 }
robertphillips@google.com366f1c62012-06-29 21:38:47 +0000279
bsalomon39ef7fb2016-09-21 11:16:05 -0700280 const SkIRect* boundsForMask = &clippedDevShapeBounds;
281 if (useCache) {
282 // Use the cache only if >50% of the path is visible.
283 int unclippedWidth = unclippedDevShapeBounds.width();
284 int unclippedHeight = unclippedDevShapeBounds.height();
285 int unclippedArea = unclippedWidth * unclippedHeight;
286 int clippedArea = clippedDevShapeBounds.width() * clippedDevShapeBounds.height();
Brian Osman11052242016-10-27 14:47:55 -0400287 int maxTextureSize = args.fRenderTargetContext->caps()->maxTextureSize();
bsalomon39ef7fb2016-09-21 11:16:05 -0700288 if (unclippedArea > 2 * clippedArea || unclippedWidth > maxTextureSize ||
289 unclippedHeight > maxTextureSize) {
290 useCache = false;
291 } else {
292 boundsForMask = &unclippedDevShapeBounds;
293 }
robertphillips@google.comed4155d2012-05-01 14:30:24 +0000294 }
295
bsalomon39ef7fb2016-09-21 11:16:05 -0700296 GrUniqueKey maskKey;
bsalomon39ef7fb2016-09-21 11:16:05 -0700297 if (useCache) {
298 // We require the upper left 2x2 of the matrix to match exactly for a cache hit.
299 SkScalar sx = args.fViewMatrix->get(SkMatrix::kMScaleX);
300 SkScalar sy = args.fViewMatrix->get(SkMatrix::kMScaleY);
301 SkScalar kx = args.fViewMatrix->get(SkMatrix::kMSkewX);
302 SkScalar ky = args.fViewMatrix->get(SkMatrix::kMSkewY);
Stan Iliev67cd6732017-08-15 17:10:26 -0400303 static const GrUniqueKey::Domain kDomain = GrUniqueKey::GenerateDomain();
304#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
305 // Fractional translate does not affect caching on Android. This is done for better cache
306 // hit ratio and speed, but it is matching HWUI behavior, which doesn't consider the matrix
307 // at all when caching paths.
308 GrUniqueKey::Builder builder(&maskKey, kDomain, 4 + args.fShape->unstyledKeySize());
309#else
bsalomon39ef7fb2016-09-21 11:16:05 -0700310 SkScalar tx = args.fViewMatrix->get(SkMatrix::kMTransX);
311 SkScalar ty = args.fViewMatrix->get(SkMatrix::kMTransY);
312 // Allow 8 bits each in x and y of subpixel positioning.
313 SkFixed fracX = SkScalarToFixed(SkScalarFraction(tx)) & 0x0000FF00;
314 SkFixed fracY = SkScalarToFixed(SkScalarFraction(ty)) & 0x0000FF00;
bsalomon39ef7fb2016-09-21 11:16:05 -0700315 GrUniqueKey::Builder builder(&maskKey, kDomain, 5 + args.fShape->unstyledKeySize());
Stan Iliev67cd6732017-08-15 17:10:26 -0400316#endif
bsalomon39ef7fb2016-09-21 11:16:05 -0700317 builder[0] = SkFloat2Bits(sx);
318 builder[1] = SkFloat2Bits(sy);
319 builder[2] = SkFloat2Bits(kx);
320 builder[3] = SkFloat2Bits(ky);
Stan Iliev67cd6732017-08-15 17:10:26 -0400321#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
322 args.fShape->writeUnstyledKey(&builder[4]);
323#else
bsalomon39ef7fb2016-09-21 11:16:05 -0700324 builder[4] = fracX | (fracY >> 8);
325 args.fShape->writeUnstyledKey(&builder[5]);
Stan Iliev67cd6732017-08-15 17:10:26 -0400326#endif
bsalomon39ef7fb2016-09-21 11:16:05 -0700327 }
328
Robert Phillipsd3749482017-03-14 09:17:43 -0400329 sk_sp<GrTextureProxy> proxy;
bsalomon39ef7fb2016-09-21 11:16:05 -0700330 if (useCache) {
Robert Phillips066f0202017-07-25 10:16:35 -0400331 proxy = fResourceProvider->findProxyByUniqueKey(maskKey, kTopLeft_GrSurfaceOrigin);
bsalomon39ef7fb2016-09-21 11:16:05 -0700332 }
Robert Phillipsd3749482017-03-14 09:17:43 -0400333 if (!proxy) {
Robert Phillips417b7f42016-12-14 09:12:13 -0500334 SkBackingFit fit = useCache ? SkBackingFit::kExact : SkBackingFit::kApprox;
Brian Salomon0e8fc8b2016-12-09 15:10:07 -0500335 GrAA aa = GrAAType::kCoverage == args.fAAType ? GrAA::kYes : GrAA::kNo;
Brian Osmanf9810662017-08-30 10:02:10 -0400336
337 SkTaskGroup* taskGroup = args.fContext->contextPriv().getTaskGroup();
338 if (taskGroup) {
339 proxy = make_deferred_mask_texture_proxy(args.fContext, fit,
340 boundsForMask->width(),
341 boundsForMask->height());
342 if (!proxy) {
343 return false;
344 }
345
346 auto uploader = skstd::make_unique<GrMaskUploaderPrepareCallback>(
347 proxy, *boundsForMask, *args.fViewMatrix, *args.fShape, aa);
348 GrMaskUploaderPrepareCallback* uploaderRaw = uploader.get();
349
350 auto drawAndUploadMask = [uploaderRaw] {
351 TRACE_EVENT0("skia", "Threaded SW Mask Render");
352 GrSWMaskHelper helper(uploaderRaw->getPixels());
Brian Salomon74077562017-08-30 13:55:35 -0400353 if (helper.init(uploaderRaw->getMaskBounds())) {
354 helper.drawShape(uploaderRaw->getShape(), *uploaderRaw->getViewMatrix(),
355 SkRegion::kReplace_Op, uploaderRaw->getAA(), 0xFF);
Brian Osmanf9810662017-08-30 10:02:10 -0400356 } else {
357 SkDEBUGFAIL("Unable to allocate SW mask.");
358 }
359 uploaderRaw->getSemaphore()->signal();
360 };
361 taskGroup->add(std::move(drawAndUploadMask));
362 args.fRenderTargetContext->getOpList()->addPrepareCallback(std::move(uploader));
363 } else {
364 GrSWMaskHelper helper;
Brian Salomon74077562017-08-30 13:55:35 -0400365 if (!helper.init(*boundsForMask)) {
Brian Osmanf9810662017-08-30 10:02:10 -0400366 return false;
367 }
Brian Salomon74077562017-08-30 13:55:35 -0400368 helper.drawShape(*args.fShape, *args.fViewMatrix, SkRegion::kReplace_Op, aa, 0xFF);
Brian Osmanf9810662017-08-30 10:02:10 -0400369 proxy = helper.toTextureProxy(args.fContext, fit);
370 }
371
Robert Phillipsd3749482017-03-14 09:17:43 -0400372 if (!proxy) {
Brian Salomon0e8fc8b2016-12-09 15:10:07 -0500373 return false;
374 }
375 if (useCache) {
Robert Phillipse44ef102017-07-21 15:37:19 -0400376 SkASSERT(proxy->origin() == kTopLeft_GrSurfaceOrigin);
Robert Phillipsd3749482017-03-14 09:17:43 -0400377 fResourceProvider->assignUniqueKeyToProxy(maskKey, proxy.get());
Brian Salomon0e8fc8b2016-12-09 15:10:07 -0500378 }
bsalomon39ef7fb2016-09-21 11:16:05 -0700379 }
bsalomon8acedde2016-06-24 10:42:16 -0700380 if (inverseFilled) {
Brian Salomonb74ef032017-08-10 12:46:01 -0400381 DrawAroundInvPath(args.fRenderTargetContext, GrPaint::Clone(args.fPaint),
Brian Salomon82f44312017-01-11 13:42:54 -0500382 *args.fUserStencilSettings, *args.fClip, *args.fViewMatrix, devClipBounds,
383 unclippedDevShapeBounds);
robertphillips@google.com5dfb6722012-07-09 16:32:28 +0000384 }
Brian Osmanc7da1462017-08-17 16:14:25 -0400385 DrawToTargetWithShapeMask(
Robert Phillips296b1cc2017-03-15 10:42:12 -0400386 std::move(proxy), args.fRenderTargetContext, std::move(args.fPaint),
Brian Salomon82f44312017-01-11 13:42:54 -0500387 *args.fUserStencilSettings, *args.fClip, *args.fViewMatrix,
388 SkIPoint{boundsForMask->fLeft, boundsForMask->fTop}, *boundsForMask);
robertphillips@google.com5dfb6722012-07-09 16:32:28 +0000389
390 return true;
robertphillips@google.comf4c2c522012-04-27 12:08:47 +0000391}