/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCoverageCountingPathRenderer.h"

#include "GrCaps.h"
#include "GrClip.h"
#include "GrProxyProvider.h"
#include "SkMakeUnique.h"
#include "SkPathOps.h"
#include "ccpr/GrCCClipProcessor.h"
#include "ccpr/GrCCDrawPathsOp.h"
#include "ccpr/GrCCPathParser.h"

using PathInstance = GrCCPathProcessor::Instance;

// If a path spans more pixels than this, we need to crop it or else analytic AA can run out of
// fp32 precision.
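// (1 << 16 == 65,536 pixels along the path's longer device-space dimension.)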
static constexpr float kPathCropThreshold = 1 << 16;

static void crop_path(const SkPath& path, const SkIRect& cropbox, SkPath* out) {
    SkPath cropboxPath;
    cropboxPath.addRect(SkRect::Make(cropbox));
    if (!Op(cropboxPath, path, kIntersect_SkPathOp, out)) {
        // This can fail if the PathOps encounter NaN or infinities.
        out->reset();
    }
    out->setIsVolatile(true);
}

GrCCPerOpListPaths::~GrCCPerOpListPaths() {
    // Ensure there are no surviving DrawPathsOps with a dangling pointer into this class.
    if (!fDrawOps.isEmpty()) {
        SK_ABORT("GrCCDrawPathsOp(s) not deleted during flush");
    }
    // Clip lazy proxies also reference this class from their callbacks, but those callbacks are
    // only invoked at flush time while we are still alive. (Unlike DrawPathsOps, which unregister
    // themselves upon destruction.) So it shouldn't matter if any clip proxies are still around.
}

bool GrCoverageCountingPathRenderer::IsSupported(const GrCaps& caps) {
    const GrShaderCaps& shaderCaps = *caps.shaderCaps();
    return shaderCaps.integerSupport() && shaderCaps.flatInterpolationSupport() &&
           caps.instanceAttribSupport() && GrCaps::kNone_MapFlags != caps.mapBufferFlags() &&
           caps.isConfigTexturable(kAlpha_half_GrPixelConfig) &&
           caps.isConfigRenderable(kAlpha_half_GrPixelConfig) &&
           !caps.blacklistCoverageCounting();
}

sk_sp<GrCoverageCountingPathRenderer> GrCoverageCountingPathRenderer::CreateIfSupported(
        const GrCaps& caps, bool drawCachablePaths) {
    auto ccpr = IsSupported(caps) ? new GrCoverageCountingPathRenderer(drawCachablePaths) : nullptr;
    return sk_sp<GrCoverageCountingPathRenderer>(ccpr);
}

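// Returns the GrCCPerOpListPaths that accumulates paths for the given opList, creating it lazily
// on first use. Entries live in fPendingPaths until preFlush() moves them to fFlushingPaths.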
GrCCPerOpListPaths* GrCoverageCountingPathRenderer::lookupPendingPaths(uint32_t opListID) {
    auto it = fPendingPaths.find(opListID);
    if (fPendingPaths.end() == it) {
        sk_sp<GrCCPerOpListPaths> paths = sk_make_sp<GrCCPerOpListPaths>();
        it = fPendingPaths.insert(std::make_pair(opListID, std::move(paths))).first;
    }
    return it->second.get();
}

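// CCPR only claims simple fills with coverage AA and no perspective; very large or very complex
// paths are offered only as a backup so that simpler direct renderers get the first chance.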
GrPathRenderer::CanDrawPath GrCoverageCountingPathRenderer::onCanDrawPath(
        const CanDrawPathArgs& args) const {
    if (args.fShape->hasUnstyledKey() && !fDrawCachablePaths) {
        return CanDrawPath::kNo;
    }

    if (!args.fShape->style().isSimpleFill() || args.fShape->inverseFilled() ||
        args.fViewMatrix->hasPerspective() || GrAAType::kCoverage != args.fAAType) {
        return CanDrawPath::kNo;
    }

    SkPath path;
    args.fShape->asPath(&path);
    SkRect devBounds;
    SkIRect devIBounds;
    args.fViewMatrix->mapRect(&devBounds, path.getBounds());
    devBounds.roundOut(&devIBounds);
    if (!devIBounds.intersect(*args.fClipConservativeBounds)) {
        // The path is completely clipped away. Our code will eventually notice this before doing
        // any real work.
        return CanDrawPath::kYes;
    }

    if (devIBounds.height() * devIBounds.width() > 256 * 256) {
        // Large paths can blow up the atlas fast, and they are not ideal for a two-pass rendering
        // algorithm. Give the simpler direct renderers a chance before we commit to drawing one.
        return CanDrawPath::kAsBackup;
    }

    if (args.fShape->hasUnstyledKey() && path.countVerbs() > 50) {
        // Complex paths do better cached in an SDF, if the renderer will accept them.
        return CanDrawPath::kAsBackup;
    }

    return CanDrawPath::kYes;
}

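// Converts the shape to a device-space path (cropping it first if it exceeds kPathCropThreshold),
// wraps it in a GrCCDrawPathsOp, and records the op on the target opList via recordOp().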
bool GrCoverageCountingPathRenderer::onDrawPath(const DrawPathArgs& args) {
    SkASSERT(!fFlushing);

    SkIRect clipIBounds;
    GrRenderTargetContext* rtc = args.fRenderTargetContext;
    args.fClip->getConservativeBounds(rtc->width(), rtc->height(), &clipIBounds, nullptr);

    SkPath path;
    args.fShape->asPath(&path);

    SkRect devBounds;
    args.fViewMatrix->mapRect(&devBounds, path.getBounds());

    std::unique_ptr<GrCCDrawPathsOp> op;
    if (SkTMax(devBounds.height(), devBounds.width()) > kPathCropThreshold) {
        // The path is too large. Crop it, or analytic AA can run out of fp32 precision.
        SkPath croppedPath;
        path.transform(*args.fViewMatrix, &croppedPath);
        crop_path(croppedPath, clipIBounds, &croppedPath);
        // FIXME: This breaks local coords: http://skbug.com/8003
        op = GrCCDrawPathsOp::Make(args.fContext, clipIBounds, SkMatrix::I(), croppedPath,
                                   croppedPath.getBounds(), std::move(args.fPaint));
    } else {
        op = GrCCDrawPathsOp::Make(args.fContext, clipIBounds, *args.fViewMatrix, path, devBounds,
                                   std::move(args.fPaint));
    }

    this->recordOp(std::move(op), args);
    return true;
}

void GrCoverageCountingPathRenderer::recordOp(std::unique_ptr<GrCCDrawPathsOp> opHolder,
                                              const DrawPathArgs& args) {
    if (GrCCDrawPathsOp* op = opHolder.get()) {
        GrRenderTargetContext* rtc = args.fRenderTargetContext;
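        // addDrawOp() hands the op to the opList and returns that opList's unique ID, or 0 if the
        // op was never added (presumably discarded, e.g., by the clip). Only ops that actually
        // landed on an opList get registered with that opList's pending paths.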
        if (uint32_t opListID = rtc->addDrawOp(*args.fClip, std::move(opHolder))) {
            op->wasRecorded(this->lookupPendingPaths(opListID));
        }
    }
}

std::unique_ptr<GrFragmentProcessor> GrCoverageCountingPathRenderer::makeClipProcessor(
        GrProxyProvider* proxyProvider, uint32_t opListID, const SkPath& deviceSpacePath,
        const SkIRect& accessRect, int rtWidth, int rtHeight) {
    using MustCheckBounds = GrCCClipProcessor::MustCheckBounds;

    SkASSERT(!fFlushing);

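    // Clip paths are keyed by the SkPath's generation ID, so repeated uses of the same clip path
    // within one opList share a single entry (and thus a single atlas rendering of the path).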
    GrCCClipPath& clipPath =
            this->lookupPendingPaths(opListID)->fClipPaths[deviceSpacePath.getGenerationID()];
    if (!clipPath.isInitialized()) {
        // This ClipPath was just created during lookup. Initialize it.
        const SkRect& pathDevBounds = deviceSpacePath.getBounds();
        if (SkTMax(pathDevBounds.height(), pathDevBounds.width()) > kPathCropThreshold) {
            // The path is too large. Crop it, or analytic AA can run out of fp32 precision.
            SkPath croppedPath;
            int maxRTSize = proxyProvider->caps()->maxRenderTargetSize();
            crop_path(deviceSpacePath, SkIRect::MakeWH(maxRTSize, maxRTSize), &croppedPath);
            clipPath.init(proxyProvider, croppedPath, accessRect, rtWidth, rtHeight);
        } else {
            clipPath.init(proxyProvider, deviceSpacePath, accessRect, rtWidth, rtHeight);
        }
    } else {
        clipPath.addAccess(accessRect);
    }

    bool mustCheckBounds = !clipPath.pathDevIBounds().contains(accessRect);
    return skstd::make_unique<GrCCClipProcessor>(&clipPath, MustCheckBounds(mustCheckBounds),
                                                 deviceSpacePath.getFillType());
}

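// Called by the onFlush mechanism before the opLists in this flush execute. Gathers every pending
// path for the given opLists, lays them out into one or more coverage-count atlases, and records
// the render target contexts that will draw those atlases ahead of the main flush.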
void GrCoverageCountingPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                              const uint32_t* opListIDs, int numOpListIDs,
                                              SkTArray<sk_sp<GrRenderTargetContext>>* atlasDraws) {
    SkASSERT(!fFlushing);
    SkASSERT(fFlushingPaths.empty());
    SkDEBUGCODE(fFlushing = true);

    if (fPendingPaths.empty()) {
        return;  // Nothing to draw.
    }

    GrCCPerFlushResourceSpecs resourceSpecs;
    int maxPreferredRTSize = onFlushRP->caps()->maxPreferredRenderTargetSize();
    resourceSpecs.fAtlasSpecs.fMaxPreferredTextureSize = maxPreferredRTSize;
    resourceSpecs.fAtlasSpecs.fMinTextureSize = SkTMin(1024, maxPreferredRTSize);
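    // i.e., atlas textures are at least 1024x1024 (clamped to the max preferred render target
    // size) and prefer not to exceed the max preferred render target size.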

    // Move the per-opList paths that are about to be flushed from fPendingPaths to fFlushingPaths,
    // and count them up so we can preallocate buffers.
    fFlushingPaths.reserve(numOpListIDs);
    for (int i = 0; i < numOpListIDs; ++i) {
        auto iter = fPendingPaths.find(opListIDs[i]);
        if (fPendingPaths.end() == iter) {
            continue;  // No paths on this opList.
        }

        fFlushingPaths.push_back(std::move(iter->second));
        fPendingPaths.erase(iter);

        for (const GrCCDrawPathsOp* op : fFlushingPaths.back()->fDrawOps) {
            op->accountForOwnPaths(&resourceSpecs);
        }
        for (const auto& clipsIter : fFlushingPaths.back()->fClipPaths) {
            clipsIter.second.accountForOwnPath(&resourceSpecs);
        }
    }

    if (resourceSpecs.isEmpty()) {
        return;  // Nothing to draw.
    }

    auto resources = sk_make_sp<GrCCPerFlushResources>(onFlushRP, resourceSpecs);
    if (!resources->isMapped()) {
        return;  // Some allocation failed.
    }

    // Lay out the atlas(es) and parse paths.
    SkDEBUGCODE(int numSkippedPaths = 0);
    for (const auto& flushingPaths : fFlushingPaths) {
        for (GrCCDrawPathsOp* op : flushingPaths->fDrawOps) {
            op->setupResources(resources.get(), onFlushRP);
            SkDEBUGCODE(numSkippedPaths += op->numSkippedInstances_debugOnly());
        }
        for (auto& clipsIter : flushingPaths->fClipPaths) {
            clipsIter.second.renderPathInAtlas(resources.get(), onFlushRP);
        }
    }
    SkASSERT(resources->nextPathInstanceIdx() == resourceSpecs.fNumRenderedPaths - numSkippedPaths);

    // Allocate the atlases and create instance buffers to draw them.
    if (!resources->finalize(onFlushRP, atlasDraws)) {
        return;
    }

    // Commit the flushing paths to the resources once they have been successfully finalized.
    for (auto& flushingPaths : fFlushingPaths) {
        SkASSERT(!flushingPaths->fFlushResources);
        flushingPaths->fFlushResources = resources;
    }
}

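// Called after the flush has executed. By this point the Ops and clip FPs are done reading the
// per-flush resources, so it is safe to drop our references and discard the flushed path data.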
void GrCoverageCountingPathRenderer::postFlush(GrDeferredUploadToken, const uint32_t* opListIDs,
                                               int numOpListIDs) {
    SkASSERT(fFlushing);

    // In DDL mode these aren't guaranteed to be deleted, so we must clear out the perFlush
    // resources manually.
    for (auto& flushingPaths : fFlushingPaths) {
        flushingPaths->fFlushResources = nullptr;
    }

    // We wait to erase these until after flush, once Ops and FPs are done accessing their data.
    fFlushingPaths.reset();
    SkDEBUGCODE(fFlushing = false);
}