/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCoverageCountingPathRenderer.h"

#include "GrCaps.h"
#include "GrClip.h"
#include "GrProxyProvider.h"
#include "SkMakeUnique.h"
#include "SkPathOps.h"
#include "ccpr/GrCCClipProcessor.h"
#include "ccpr/GrCCDrawPathsOp.h"
#include "ccpr/GrCCPathCache.h"
#include "ccpr/GrCCPathParser.h"

using PathInstance = GrCCPathProcessor::Instance;

// If a path spans more pixels than this, we need to crop it or else analytic AA can run out of
// fp32 precision.
static constexpr float kPathCropThreshold = 1 << 16;

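// Intersects 'path' against 'cropbox' using PathOps and writes the result to 'out'. If the
// intersection fails (e.g. the path contains NaN or infinities), 'out' is reset to empty.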
static void crop_path(const SkPath& path, const SkIRect& cropbox, SkPath* out) {
    SkPath cropboxPath;
    cropboxPath.addRect(SkRect::Make(cropbox));
    if (!Op(cropboxPath, path, kIntersect_SkPathOp, out)) {
        // This can fail if the PathOps encounter NaN or infinities.
        out->reset();
    }
    out->setIsVolatile(true);
}

GrCCPerOpListPaths::~GrCCPerOpListPaths() {
    // Ensure there are no surviving DrawPathsOps with a dangling pointer into this class.
    if (!fDrawOps.isEmpty()) {
        SK_ABORT("GrCCDrawPathsOp(s) not deleted during flush");
    }
    // Clip lazy proxies also reference this class from their callbacks, but those callbacks are
    // only invoked at flush time while we are still alive. (Unlike DrawPathsOps, which unregister
    // themselves upon destruction.) So it shouldn't matter if any clip proxies are still around.
}

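// CCPR is only supported when the GPU provides every feature it relies on (integer and flat
// interpolation shader support, instanced attribs, buffer mapping, and renderable/texturable
// alpha-half and alpha-8 configs) and coverage counting hasn't been blacklisted for the device.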
bool GrCoverageCountingPathRenderer::IsSupported(const GrCaps& caps) {
    const GrShaderCaps& shaderCaps = *caps.shaderCaps();
    return shaderCaps.integerSupport() && shaderCaps.flatInterpolationSupport() &&
           caps.instanceAttribSupport() && GrCaps::kNone_MapFlags != caps.mapBufferFlags() &&
           caps.isConfigTexturable(kAlpha_half_GrPixelConfig) &&
           caps.isConfigRenderable(kAlpha_half_GrPixelConfig) &&
           caps.isConfigTexturable(kAlpha_8_GrPixelConfig) &&
           caps.isConfigRenderable(kAlpha_8_GrPixelConfig) &&
           !caps.blacklistCoverageCounting();
}

sk_sp<GrCoverageCountingPathRenderer> GrCoverageCountingPathRenderer::CreateIfSupported(
        const GrCaps& caps, AllowCaching allowCaching) {
    return sk_sp<GrCoverageCountingPathRenderer>(
            IsSupported(caps) ? new GrCoverageCountingPathRenderer(allowCaching) : nullptr);
}

GrCoverageCountingPathRenderer::GrCoverageCountingPathRenderer(AllowCaching allowCaching) {
    if (AllowCaching::kYes == allowCaching) {
        fPathCache = skstd::make_unique<GrCCPathCache>();
    }
}

GrCoverageCountingPathRenderer::~GrCoverageCountingPathRenderer() {
    // Ensure callers are actually flushing paths they record, not causing us to leak memory.
    SkASSERT(fPendingPaths.empty());
    SkASSERT(!fFlushing);
}

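// Returns the GrCCPerOpListPaths record tracking pending paths for the given opList, creating a
// new (empty) one if this is the first path recorded for that opList.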
GrCCPerOpListPaths* GrCoverageCountingPathRenderer::lookupPendingPaths(uint32_t opListID) {
    auto it = fPendingPaths.find(opListID);
    if (fPendingPaths.end() == it) {
        sk_sp<GrCCPerOpListPaths> paths = sk_make_sp<GrCCPerOpListPaths>();
        it = fPendingPaths.insert(std::make_pair(opListID, std::move(paths))).first;
    }
    return it->second.get();
}

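// CCPR only accepts simple (non-inverse) fills with coverage AA and no perspective. Beyond that,
// heuristics based on the clipped device-space bounds and the path's verb/point counts decide
// whether to claim the path, offer to draw it only as a backup, or reject it outright.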
GrPathRenderer::CanDrawPath GrCoverageCountingPathRenderer::onCanDrawPath(
        const CanDrawPathArgs& args) const {
    if (!args.fShape->style().isSimpleFill() || args.fShape->inverseFilled() ||
        args.fViewMatrix->hasPerspective() || GrAAType::kCoverage != args.fAAType) {
        return CanDrawPath::kNo;
    }

    SkPath path;
    args.fShape->asPath(&path);

    SkRect devBounds;
    args.fViewMatrix->mapRect(&devBounds, path.getBounds());

    SkIRect clippedIBounds;
    devBounds.roundOut(&clippedIBounds);
    if (!clippedIBounds.intersect(*args.fClipConservativeBounds)) {
        // The path is completely clipped away. Our code will eventually notice this before doing
        // any real work.
        return CanDrawPath::kYes;
    }

    int64_t numPixels = sk_64_mul(clippedIBounds.height(), clippedIBounds.width());
    if (path.countVerbs() > 1000 && path.countPoints() > numPixels) {
        // This is a complicated path that has more vertices than pixels! Let the SW renderer have
        // this one: it will probably be faster, and a bitmap will require less total memory on the
        // GPU than CCPR instance buffers would for the raw path data.
        return CanDrawPath::kNo;
    }

    if (numPixels > 256 * 256) {
        // Large paths can blow up the atlas fast, and they are not ideal for a two-pass rendering
        // algorithm. Give the simpler direct renderers a chance before we commit to drawing it.
        return CanDrawPath::kAsBackup;
    }

    if (args.fShape->hasUnstyledKey() && path.countVerbs() > 50) {
        // Complex paths do better cached in an SDF, if the renderer will accept them.
        return CanDrawPath::kAsBackup;
    }

    return CanDrawPath::kYes;
}

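// Builds a GrCCDrawPathsOp for the path. Device-space paths larger than kPathCropThreshold are
// pre-cropped to the conservative clip bounds so analytic AA stays within fp32 precision, then
// the op is handed to recordOp() below.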
bool GrCoverageCountingPathRenderer::onDrawPath(const DrawPathArgs& args) {
    SkASSERT(!fFlushing);

    SkIRect clipIBounds;
    GrRenderTargetContext* rtc = args.fRenderTargetContext;
    args.fClip->getConservativeBounds(rtc->width(), rtc->height(), &clipIBounds, nullptr);

    SkRect devBounds;
    args.fViewMatrix->mapRect(&devBounds, args.fShape->bounds());

    std::unique_ptr<GrCCDrawPathsOp> op;
    if (SkTMax(devBounds.height(), devBounds.width()) > kPathCropThreshold) {
        // The path is too large. Crop it, or analytic AA can run out of fp32 precision.
        SkPath croppedPath;
        args.fShape->asPath(&croppedPath);
        croppedPath.transform(*args.fViewMatrix, &croppedPath);
        crop_path(croppedPath, clipIBounds, &croppedPath);
        // FIXME: This breaks local coords: http://skbug.com/8003
        op = GrCCDrawPathsOp::Make(args.fContext, clipIBounds, SkMatrix::I(), GrShape(croppedPath),
                                   croppedPath.getBounds(), std::move(args.fPaint));
    } else {
        op = GrCCDrawPathsOp::Make(args.fContext, clipIBounds, *args.fViewMatrix, *args.fShape,
                                   devBounds, std::move(args.fPaint));
    }

    this->recordOp(std::move(op), args);
    return true;
}

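// Adds the op to the render target context. If addDrawOp() returns a non-zero opList ID (i.e. the
// op was actually recorded rather than discarded), the op is registered with that opList's
// pending-paths record so its paths get picked up at flush time.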
void GrCoverageCountingPathRenderer::recordOp(std::unique_ptr<GrCCDrawPathsOp> opHolder,
                                              const DrawPathArgs& args) {
    if (GrCCDrawPathsOp* op = opHolder.get()) {
        GrRenderTargetContext* rtc = args.fRenderTargetContext;
        if (uint32_t opListID = rtc->addDrawOp(*args.fClip, std::move(opHolder))) {
            op->wasRecorded(this->lookupPendingPaths(opListID));
        }
    }
}

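// Returns a fragment processor that applies 'deviceSpacePath' as a coverage-counted clip. Clip
// paths are cached per opList, keyed on the SkPath's generation ID; repeat lookups for the same
// path just record the additional access rect.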
std::unique_ptr<GrFragmentProcessor> GrCoverageCountingPathRenderer::makeClipProcessor(
        uint32_t opListID, const SkPath& deviceSpacePath, const SkIRect& accessRect, int rtWidth,
        int rtHeight, const GrCaps& caps) {
    using MustCheckBounds = GrCCClipProcessor::MustCheckBounds;

    SkASSERT(!fFlushing);

    GrCCClipPath& clipPath =
            this->lookupPendingPaths(opListID)->fClipPaths[deviceSpacePath.getGenerationID()];
    if (!clipPath.isInitialized()) {
        // This ClipPath was just created during lookup. Initialize it.
        const SkRect& pathDevBounds = deviceSpacePath.getBounds();
        if (SkTMax(pathDevBounds.height(), pathDevBounds.width()) > kPathCropThreshold) {
            // The path is too large. Crop it, or analytic AA can run out of fp32 precision.
            SkPath croppedPath;
            int maxRTSize = caps.maxRenderTargetSize();
            crop_path(deviceSpacePath, SkIRect::MakeWH(maxRTSize, maxRTSize), &croppedPath);
            clipPath.init(croppedPath, accessRect, rtWidth, rtHeight, caps);
        } else {
            clipPath.init(deviceSpacePath, accessRect, rtWidth, rtHeight, caps);
        }
    } else {
        clipPath.addAccess(accessRect);
    }

    bool mustCheckBounds = !clipPath.pathDevIBounds().contains(accessRect);
    return skstd::make_unique<GrCCClipProcessor>(&clipPath, MustCheckBounds(mustCheckBounds),
                                                 deviceSpacePath.getFillType());
}

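// preFlush() gathers every pending path for the opLists about to be flushed, decides whether any
// cached paths from the previous flush's stashed atlas are worth copying forward, lays the paths
// out into per-flush atlas(es), and hands the atlas render-target contexts back to the flush via
// 'out'.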
void GrCoverageCountingPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                              const uint32_t* opListIDs, int numOpListIDs,
                                              SkTArray<sk_sp<GrRenderTargetContext>>* out) {
    using DoCopiesToCache = GrCCDrawPathsOp::DoCopiesToCache;
    SkASSERT(!fFlushing);
    SkASSERT(fFlushingPaths.empty());
    SkDEBUGCODE(fFlushing = true);

    // Dig up the stashed atlas from the previous flush (if any) so we can attempt to copy any
    // reusable paths out of it and into the resource cache. We also need to clear its unique key.
    sk_sp<GrTextureProxy> stashedAtlasProxy;
    if (fStashedAtlasKey.isValid()) {
        stashedAtlasProxy = onFlushRP->findOrCreateProxyByUniqueKey(fStashedAtlasKey,
                                                                    GrCCAtlas::kTextureOrigin);
        if (stashedAtlasProxy) {
            // Instantiate the proxy so we can clear the underlying texture's unique key.
            onFlushRP->instatiateProxy(stashedAtlasProxy.get());
            onFlushRP->removeUniqueKeyFromProxy(fStashedAtlasKey, stashedAtlasProxy.get());
        } else {
            fStashedAtlasKey.reset();  // Indicate there is no stashed atlas to copy from.
        }
    }

    if (fPendingPaths.empty()) {
        fStashedAtlasKey.reset();
        return;  // Nothing to draw.
    }

    GrCCPerFlushResourceSpecs specs;
    int maxPreferredRTSize = onFlushRP->caps()->maxPreferredRenderTargetSize();
    specs.fCopyAtlasSpecs.fMaxPreferredTextureSize = SkTMin(2048, maxPreferredRTSize);
    SkASSERT(0 == specs.fCopyAtlasSpecs.fMinTextureSize);
    specs.fRenderedAtlasSpecs.fMaxPreferredTextureSize = maxPreferredRTSize;
    specs.fRenderedAtlasSpecs.fMinTextureSize = SkTMin(512, maxPreferredRTSize);

    // Move the per-opList paths that are about to be flushed from fPendingPaths to fFlushingPaths,
    // and count them up so we can preallocate buffers.
    fFlushingPaths.reserve(numOpListIDs);
    for (int i = 0; i < numOpListIDs; ++i) {
        auto iter = fPendingPaths.find(opListIDs[i]);
        if (fPendingPaths.end() == iter) {
            continue;  // No paths on this opList.
        }

        fFlushingPaths.push_back(std::move(iter->second));
        fPendingPaths.erase(iter);

        for (GrCCDrawPathsOp* op : fFlushingPaths.back()->fDrawOps) {
            op->accountForOwnPaths(fPathCache.get(), onFlushRP, fStashedAtlasKey, &specs);
        }
        for (const auto& clipsIter : fFlushingPaths.back()->fClipPaths) {
            clipsIter.second.accountForOwnPath(&specs);
        }
    }
    fStashedAtlasKey.reset();

    if (specs.isEmpty()) {
        return;  // Nothing to draw.
    }

    // Determine if there are enough reusable paths from last flush for it to be worth our time to
    // copy them to cached atlas(es).
    DoCopiesToCache doCopies = DoCopiesToCache(specs.fNumCopiedPaths > 100 ||
                                               specs.fCopyAtlasSpecs.fApproxNumPixels > 256 * 256);
    if (specs.fNumCopiedPaths && DoCopiesToCache::kNo == doCopies) {
        specs.convertCopiesToRenders();
        SkASSERT(!specs.fNumCopiedPaths);
    }

    auto resources = sk_make_sp<GrCCPerFlushResources>(onFlushRP, specs);
    if (!resources->isMapped()) {
        return;  // Some allocation failed.
    }

    // Lay out the atlas(es) and parse paths.
    for (const auto& flushingPaths : fFlushingPaths) {
        for (GrCCDrawPathsOp* op : flushingPaths->fDrawOps) {
            op->setupResources(onFlushRP, resources.get(), doCopies);
        }
        for (auto& clipsIter : flushingPaths->fClipPaths) {
            clipsIter.second.renderPathInAtlas(resources.get(), onFlushRP);
        }
    }

    // Allocate resources and then render the atlas(es).
    if (!resources->finalize(onFlushRP, std::move(stashedAtlasProxy), out)) {
        return;
    }
    // Verify the stashed atlas got released so its texture could be recycled.
    SkASSERT(!stashedAtlasProxy);

    // Commit flushing paths to the resources once they are successfully completed.
    for (auto& flushingPaths : fFlushingPaths) {
        SkASSERT(!flushingPaths->fFlushResources);
        flushingPaths->fFlushResources = resources;
    }
}

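// postFlush() stashes the key of this flush's atlas (if the per-flush resources kept one) for
// reuse next flush, then releases the per-flush resources and the flushed path records.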
void GrCoverageCountingPathRenderer::postFlush(GrDeferredUploadToken, const uint32_t* opListIDs,
                                               int numOpListIDs) {
    SkASSERT(fFlushing);
    SkASSERT(!fStashedAtlasKey.isValid());  // Should have been cleared in preFlush().

    if (!fFlushingPaths.empty()) {
        // Note the stashed atlas's key for next flush, if any.
        auto resources = fFlushingPaths.front()->fFlushResources.get();
        if (resources && resources->hasStashedAtlas()) {
            fStashedAtlasKey = resources->stashedAtlasKey();
        }

        // In DDL mode these aren't guaranteed to be deleted, so we must clear out the per-flush
        // resources manually.
        for (auto& flushingPaths : fFlushingPaths) {
            flushingPaths->fFlushResources = nullptr;
        }

        // We wait to erase these until after flush, once Ops and FPs are done accessing their
        // data.
        fFlushingPaths.reset();
    }

    SkDEBUGCODE(fFlushing = false);
}