/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCoverageCountingPathRenderer.h"

#include "GrCaps.h"
#include "GrClip.h"
#include "GrProxyProvider.h"
#include "SkMakeUnique.h"
#include "SkPathOps.h"
#include "ccpr/GrCCClipProcessor.h"
#include "ccpr/GrCCDrawPathsOp.h"
#include "ccpr/GrCCPathCache.h"

using PathInstance = GrCCPathProcessor::Instance;

GrCCPerOpListPaths::~GrCCPerOpListPaths() {
    // Ensure there are no surviving DrawPathsOps with a dangling pointer into this class.
    if (!fDrawOps.isEmpty()) {
        SK_ABORT("GrCCDrawPathsOp(s) not deleted during flush");
    }
    // Clip lazy proxies also reference this class from their callbacks, but those callbacks
    // are only invoked at flush time while we are still alive. (Unlike DrawPathsOps, which
    // unregister themselves upon destruction.) So it shouldn't matter if any clip proxies
    // are still around.
}

bool GrCoverageCountingPathRenderer::IsSupported(const GrCaps& caps) {
    const GrShaderCaps& shaderCaps = *caps.shaderCaps();
    return shaderCaps.integerSupport() && shaderCaps.flatInterpolationSupport() &&
           caps.instanceAttribSupport() && GrCaps::kNone_MapFlags != caps.mapBufferFlags() &&
           caps.isConfigTexturable(kAlpha_half_GrPixelConfig) &&
           caps.isConfigRenderable(kAlpha_half_GrPixelConfig) &&
           caps.isConfigTexturable(kAlpha_8_GrPixelConfig) &&
           caps.isConfigRenderable(kAlpha_8_GrPixelConfig) &&
           !caps.blacklistCoverageCounting();
}

sk_sp<GrCoverageCountingPathRenderer> GrCoverageCountingPathRenderer::CreateIfSupported(
        const GrCaps& caps, AllowCaching allowCaching) {
    return sk_sp<GrCoverageCountingPathRenderer>(
            IsSupported(caps) ? new GrCoverageCountingPathRenderer(allowCaching) : nullptr);
}

GrCoverageCountingPathRenderer::GrCoverageCountingPathRenderer(AllowCaching allowCaching) {
    if (AllowCaching::kYes == allowCaching) {
        fPathCache = skstd::make_unique<GrCCPathCache>();
    }
}

GrCoverageCountingPathRenderer::~GrCoverageCountingPathRenderer() {
    // Ensure callers are actually flushing paths they record, not causing us to leak memory.
    SkASSERT(fPendingPaths.empty());
    SkASSERT(!fFlushing);
}

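// Returns the GrCCPerOpListPaths bucket for the given opList ID, creating it on first use. Paths
// recorded here remain pending until preFlush() moves them into fFlushingPaths.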
GrCCPerOpListPaths* GrCoverageCountingPathRenderer::lookupPendingPaths(uint32_t opListID) {
    auto it = fPendingPaths.find(opListID);
    if (fPendingPaths.end() == it) {
        sk_sp<GrCCPerOpListPaths> paths = sk_make_sp<GrCCPerOpListPaths>();
        it = fPendingPaths.insert(std::make_pair(opListID, std::move(paths))).first;
    }
    return it->second.get();
}

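// Decides whether CCPR should handle this draw: coverage AA only, no perspective, no path effects,
// and no inverse fills. Filled paths that are very large or very complex are returned as kNo or
// kAsBackup so that simpler direct renderers (or the SW renderer) get a chance first.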
GrPathRenderer::CanDrawPath GrCoverageCountingPathRenderer::onCanDrawPath(
        const CanDrawPathArgs& args) const {
    const GrShape& shape = *args.fShape;
    if (GrAAType::kCoverage != args.fAAType || shape.style().hasPathEffect() ||
        args.fViewMatrix->hasPerspective() || shape.inverseFilled()) {
        return CanDrawPath::kNo;
    }

    SkPath path;
    shape.asPath(&path);

    switch (shape.style().strokeRec().getStyle()) {
        case SkStrokeRec::kFill_Style: {
            SkRect devBounds;
            args.fViewMatrix->mapRect(&devBounds, path.getBounds());

            SkIRect clippedIBounds;
            devBounds.roundOut(&clippedIBounds);
            if (!clippedIBounds.intersect(*args.fClipConservativeBounds)) {
                // The path is completely clipped away. Our code will eventually notice this before
                // doing any real work.
                return CanDrawPath::kYes;
            }

            int64_t numPixels = sk_64_mul(clippedIBounds.height(), clippedIBounds.width());
            if (path.countVerbs() > 1000 && path.countPoints() > numPixels) {
                // This is a complicated path that has more vertices than pixels! Let's let the SW
                // renderer have this one: It will probably be faster and a bitmap will require less
                // total memory on the GPU than CCPR instance buffers would for the raw path data.
                return CanDrawPath::kNo;
            }

            if (numPixels > 256 * 256) {
                // Large paths can blow up the atlas fast. And they are not ideal for a two-pass
                // rendering algorithm. Give the simpler direct renderers a chance before we commit
                // to drawing it.
                return CanDrawPath::kAsBackup;
            }

            if (args.fShape->hasUnstyledKey() && path.countVerbs() > 50) {
                // Complex paths do better cached in an SDF, if the renderer will accept them.
                return CanDrawPath::kAsBackup;
            }

            return CanDrawPath::kYes;
        }

        case SkStrokeRec::kStroke_Style:
            if (!args.fViewMatrix->isSimilarity()) {
                // The stroker currently only supports rigid-body transforms for the stroke lines
                // themselves. This limitation doesn't affect hairlines since their stroke lines are
                // defined relative to device space.
                return CanDrawPath::kNo;
            }
            // fallthru
        case SkStrokeRec::kHairline_Style:
            // The stroker does not support conics yet.
            return !SkPathPriv::ConicWeightCnt(path) ? CanDrawPath::kYes : CanDrawPath::kNo;

        case SkStrokeRec::kStrokeAndFill_Style:
            return CanDrawPath::kNo;
    }

    SK_ABORT("Invalid stroke style.");
    return CanDrawPath::kNo;
}

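// Records a GrCCDrawPathsOp for this draw. The path is not rendered here; its coverage is
// generated later, in an atlas built during preFlush().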
bool GrCoverageCountingPathRenderer::onDrawPath(const DrawPathArgs& args) {
    SkASSERT(!fFlushing);

    SkIRect clipIBounds;
    GrRenderTargetContext* rtc = args.fRenderTargetContext;
    args.fClip->getConservativeBounds(rtc->width(), rtc->height(), &clipIBounds, nullptr);

    auto op = GrCCDrawPathsOp::Make(args.fContext, clipIBounds, *args.fViewMatrix, *args.fShape,
                                    std::move(args.fPaint));
    this->recordOp(std::move(op), args);
    return true;
}

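// Hands the op off to the GrRenderTargetContext. If addDrawOp() actually records it (i.e. returns
// a non-zero opList ID), register the op with that opList's pending paths so its geometry is
// accounted for in the next flush.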
void GrCoverageCountingPathRenderer::recordOp(std::unique_ptr<GrCCDrawPathsOp> opHolder,
                                              const DrawPathArgs& args) {
    if (GrCCDrawPathsOp* op = opHolder.get()) {
        GrRenderTargetContext* rtc = args.fRenderTargetContext;
        if (uint32_t opListID = rtc->addDrawOp(*args.fClip, std::move(opHolder))) {
            op->wasRecorded(this->lookupPendingPaths(opListID));
        }
    }
}

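// Creates a fragment processor that clips against a path rendered into a CCPR atlas. Clip paths
// are deduplicated per opList by the SkPath's generation ID, so repeated uses of the same clip
// share a single GrCCClipPath entry.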
std::unique_ptr<GrFragmentProcessor> GrCoverageCountingPathRenderer::makeClipProcessor(
        uint32_t opListID, const SkPath& deviceSpacePath, const SkIRect& accessRect, int rtWidth,
        int rtHeight, const GrCaps& caps) {
    using MustCheckBounds = GrCCClipProcessor::MustCheckBounds;

    SkASSERT(!fFlushing);

    GrCCClipPath& clipPath =
            this->lookupPendingPaths(opListID)->fClipPaths[deviceSpacePath.getGenerationID()];
    if (!clipPath.isInitialized()) {
        // This ClipPath was just created during lookup. Initialize it.
        const SkRect& pathDevBounds = deviceSpacePath.getBounds();
        if (SkTMax(pathDevBounds.height(), pathDevBounds.width()) > kPathCropThreshold) {
            // The path is too large. Crop it or analytic AA can run out of fp32 precision.
            SkPath croppedPath;
            int maxRTSize = caps.maxRenderTargetSize();
            CropPath(deviceSpacePath, SkIRect::MakeWH(maxRTSize, maxRTSize), &croppedPath);
            clipPath.init(croppedPath, accessRect, rtWidth, rtHeight, caps);
        } else {
            clipPath.init(deviceSpacePath, accessRect, rtWidth, rtHeight, caps);
        }
    } else {
        clipPath.addAccess(accessRect);
    }

    bool mustCheckBounds = !clipPath.pathDevIBounds().contains(accessRect);
    return skstd::make_unique<GrCCClipProcessor>(&clipPath, MustCheckBounds(mustCheckBounds),
                                                 deviceSpacePath.getFillType());
}

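// Invoked by the onFlush mechanism at the start of a flush. This is where the real work happens:
// pending paths for the flushed opLists are gathered, per-flush buffers and atlas specs are
// tallied, and the atlas(es) are laid out and rendered before the main opLists execute.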
void GrCoverageCountingPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                              const uint32_t* opListIDs, int numOpListIDs,
                                              SkTArray<sk_sp<GrRenderTargetContext>>* out) {
    using DoCopiesToCache = GrCCDrawPathsOp::DoCopiesToCache;
    SkASSERT(!fFlushing);
    SkASSERT(fFlushingPaths.empty());
    SkDEBUGCODE(fFlushing = true);

    // Dig up the stashed atlas from the previous flush (if any) so we can attempt to copy any
    // reusable paths out of it and into the resource cache. We also need to clear its unique key.
    sk_sp<GrTextureProxy> stashedAtlasProxy;
    if (fStashedAtlasKey.isValid()) {
        stashedAtlasProxy = onFlushRP->findOrCreateProxyByUniqueKey(fStashedAtlasKey,
                                                                    GrCCAtlas::kTextureOrigin);
        if (stashedAtlasProxy) {
            // Instantiate the proxy so we can clear the underlying texture's unique key.
            onFlushRP->instatiateProxy(stashedAtlasProxy.get());
            onFlushRP->removeUniqueKeyFromProxy(fStashedAtlasKey, stashedAtlasProxy.get());
        } else {
            fStashedAtlasKey.reset();  // Indicate there is no stashed atlas to copy from.
        }
    }

    if (fPendingPaths.empty()) {
        fStashedAtlasKey.reset();
        return;  // Nothing to draw.
    }

    GrCCPerFlushResourceSpecs specs;
    int maxPreferredRTSize = onFlushRP->caps()->maxPreferredRenderTargetSize();
    specs.fCopyAtlasSpecs.fMaxPreferredTextureSize = SkTMin(2048, maxPreferredRTSize);
    SkASSERT(0 == specs.fCopyAtlasSpecs.fMinTextureSize);
    specs.fRenderedAtlasSpecs.fMaxPreferredTextureSize = maxPreferredRTSize;
    specs.fRenderedAtlasSpecs.fMinTextureSize = SkTMin(512, maxPreferredRTSize);

    // Move the per-opList paths that are about to be flushed from fPendingPaths to fFlushingPaths,
    // and count them up so we can preallocate buffers.
    fFlushingPaths.reserve(numOpListIDs);
    for (int i = 0; i < numOpListIDs; ++i) {
        auto iter = fPendingPaths.find(opListIDs[i]);
        if (fPendingPaths.end() == iter) {
            continue;  // No paths on this opList.
        }

        fFlushingPaths.push_back(std::move(iter->second));
        fPendingPaths.erase(iter);

        for (GrCCDrawPathsOp* op : fFlushingPaths.back()->fDrawOps) {
            op->accountForOwnPaths(fPathCache.get(), onFlushRP, fStashedAtlasKey, &specs);
        }
        for (const auto& clipsIter : fFlushingPaths.back()->fClipPaths) {
            clipsIter.second.accountForOwnPath(&specs);
        }
    }
    fStashedAtlasKey.reset();

    if (specs.isEmpty()) {
        return;  // Nothing to draw.
    }

    // Determine if there are enough reusable paths from last flush for it to be worth our time to
    // copy them to cached atlas(es).
    int numCopies = specs.fNumCopiedPaths[GrCCPerFlushResourceSpecs::kFillIdx] +
                    specs.fNumCopiedPaths[GrCCPerFlushResourceSpecs::kStrokeIdx];
    DoCopiesToCache doCopies = DoCopiesToCache(numCopies > 100 ||
                                               specs.fCopyAtlasSpecs.fApproxNumPixels > 256 * 256);
    if (numCopies && DoCopiesToCache::kNo == doCopies) {
        specs.convertCopiesToRenders();
        SkASSERT(!specs.fNumCopiedPaths[GrCCPerFlushResourceSpecs::kFillIdx]);
        SkASSERT(!specs.fNumCopiedPaths[GrCCPerFlushResourceSpecs::kStrokeIdx]);
    }

    auto resources = sk_make_sp<GrCCPerFlushResources>(onFlushRP, specs);
    if (!resources->isMapped()) {
        return;  // Some allocation failed.
    }

    // Lay out the atlas(es) and parse paths.
    for (const auto& flushingPaths : fFlushingPaths) {
        for (GrCCDrawPathsOp* op : flushingPaths->fDrawOps) {
            op->setupResources(onFlushRP, resources.get(), doCopies);
        }
        for (auto& clipsIter : flushingPaths->fClipPaths) {
            clipsIter.second.renderPathInAtlas(resources.get(), onFlushRP);
        }
    }

    // Allocate resources and then render the atlas(es).
    if (!resources->finalize(onFlushRP, std::move(stashedAtlasProxy), out)) {
        return;
    }
    // Verify the stashed atlas got released so its texture could be recycled.
    SkASSERT(!stashedAtlasProxy);

    // Commit flushing paths to the resources once they are successfully completed.
    for (auto& flushingPaths : fFlushingPaths) {
        SkASSERT(!flushingPaths->fFlushResources);
        flushingPaths->fFlushResources = resources;
    }
}

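// Invoked by the onFlush mechanism after the flushed opLists have executed. Remembers the key of
// the atlas stashed for next flush (if any) and releases this flush's GrCCPerFlushResources.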
void GrCoverageCountingPathRenderer::postFlush(GrDeferredUploadToken, const uint32_t* opListIDs,
                                               int numOpListIDs) {
    SkASSERT(fFlushing);
    SkASSERT(!fStashedAtlasKey.isValid());  // Should have been cleared in preFlush().

    if (!fFlushingPaths.empty()) {
        // Note the stashed atlas's key for next flush, if any.
        auto resources = fFlushingPaths.front()->fFlushResources.get();
        if (resources && resources->hasStashedAtlas()) {
            fStashedAtlasKey = resources->stashedAtlasKey();
        }

        // In DDL mode these aren't guaranteed to be deleted so we must clear out the perFlush
        // resources manually.
        for (auto& flushingPaths : fFlushingPaths) {
            flushingPaths->fFlushResources = nullptr;
        }

        // We wait to erase these until after flush, once Ops and FPs are done accessing their data.
        fFlushingPaths.reset();
    }

    SkDEBUGCODE(fFlushing = false);
}

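// Intersects "path" against "cropbox" using SkPathOps. Used to keep oversized paths small enough
// that analytic AA math does not run out of fp32 precision.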
void GrCoverageCountingPathRenderer::CropPath(const SkPath& path, const SkIRect& cropbox,
                                              SkPath* out) {
    SkPath cropboxPath;
    cropboxPath.addRect(SkRect::Make(cropbox));
    if (!Op(cropboxPath, path, kIntersect_SkPathOp, out)) {
        // This can fail if the PathOps encounter NaN or infinities.
        out->reset();
    }
    out->setIsVolatile(true);
}