blob: 4358d21e24e93f72fde22fb3ea4a35d04893efdb [file] [log] [blame]
/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8#include "GrCoverageCountingPathRenderer.h"
9
10#include "GrCaps.h"
11#include "GrClip.h"
Robert Phillips777707b2018-01-17 11:40:14 -050012#include "GrProxyProvider.h"
Chris Dalton1a325d22017-07-14 15:17:41 -060013#include "SkMakeUnique.h"
Chris Daltona039d3b2017-09-28 11:16:36 -060014#include "SkPathOps.h"
Chris Dalton383a2ef2018-01-08 17:21:41 -050015#include "ccpr/GrCCClipProcessor.h"
Chris Dalton5ba36ba2018-05-09 01:08:38 -060016#include "ccpr/GrCCPathParser.h"
17#include "ccpr/GrCCPerFlushResources.h"
Chris Dalton1a325d22017-07-14 15:17:41 -060018
// Shorthand for the per-path instance data uploaded to the GPU path processor.
using PathInstance = GrCCPathProcessor::Instance;

// If a path spans more pixels than this, we need to crop it or else analytic AA can run out of fp32
// precision.
static constexpr float kPathCropThreshold = 1 << 16;
24
25static void crop_path(const SkPath& path, const SkIRect& cropbox, SkPath* out) {
Chris Dalton5ba36ba2018-05-09 01:08:38 -060026 SkPath cropboxPath;
27 cropboxPath.addRect(SkRect::Make(cropbox));
28 if (!Op(cropboxPath, path, kIntersect_SkPathOp, out)) {
Chris Daltona32a3c32017-12-05 10:05:21 -070029 // This can fail if the PathOps encounter NaN or infinities.
30 out->reset();
31 }
Chris Dalton5ba36ba2018-05-09 01:08:38 -060032 out->setIsVolatile(true);
Chris Daltona32a3c32017-12-05 10:05:21 -070033}
Chris Dalton1a325d22017-07-14 15:17:41 -060034
35bool GrCoverageCountingPathRenderer::IsSupported(const GrCaps& caps) {
36 const GrShaderCaps& shaderCaps = *caps.shaderCaps();
Chris Dalton383a2ef2018-01-08 17:21:41 -050037 return shaderCaps.integerSupport() && shaderCaps.flatInterpolationSupport() &&
38 caps.instanceAttribSupport() && GrCaps::kNone_MapFlags != caps.mapBufferFlags() &&
Chris Dalton1a325d22017-07-14 15:17:41 -060039 caps.isConfigTexturable(kAlpha_half_GrPixelConfig) &&
Brian Salomonbdecacf2018-02-02 20:32:49 -050040 caps.isConfigRenderable(kAlpha_half_GrPixelConfig) &&
Chris Daltone4679fa2017-09-29 13:58:26 -060041 !caps.blacklistCoverageCounting();
Chris Dalton1a325d22017-07-14 15:17:41 -060042}
43
Chris Dalton383a2ef2018-01-08 17:21:41 -050044sk_sp<GrCoverageCountingPathRenderer> GrCoverageCountingPathRenderer::CreateIfSupported(
45 const GrCaps& caps, bool drawCachablePaths) {
Chris Daltona2ac30d2017-10-17 10:40:01 -060046 auto ccpr = IsSupported(caps) ? new GrCoverageCountingPathRenderer(drawCachablePaths) : nullptr;
47 return sk_sp<GrCoverageCountingPathRenderer>(ccpr);
Chris Dalton1a325d22017-07-14 15:17:41 -060048}
49
// When drawCachablePaths is false, onCanDrawPath rejects shapes with unstyled
// keys so they can go to a caching renderer instead.
GrCoverageCountingPathRenderer::GrCoverageCountingPathRenderer(bool drawCachablePaths)
        : fDrawCachablePaths(drawCachablePaths) {
}
53
GrCoverageCountingPathRenderer::~GrCoverageCountingPathRenderer() {
    // Ensure no Ops exist that could have a dangling pointer back into this class.
    SkASSERT(fRTPendingPathsMap.empty());
    SkASSERT(0 == fNumOutstandingDrawOps);
}
59
Chris Dalton383a2ef2018-01-08 17:21:41 -050060GrPathRenderer::CanDrawPath GrCoverageCountingPathRenderer::onCanDrawPath(
61 const CanDrawPathArgs& args) const {
Chris Daltona2ac30d2017-10-17 10:40:01 -060062 if (args.fShape->hasUnstyledKey() && !fDrawCachablePaths) {
63 return CanDrawPath::kNo;
64 }
65
Chris Dalton383a2ef2018-01-08 17:21:41 -050066 if (!args.fShape->style().isSimpleFill() || args.fShape->inverseFilled() ||
67 args.fViewMatrix->hasPerspective() || GrAAType::kCoverage != args.fAAType) {
Chris Dalton5ed44232017-09-07 13:22:46 -060068 return CanDrawPath::kNo;
Chris Dalton1a325d22017-07-14 15:17:41 -060069 }
70
71 SkPath path;
72 args.fShape->asPath(&path);
Chris Daltondb91c6e2017-09-08 16:25:08 -060073 SkRect devBounds;
74 SkIRect devIBounds;
75 args.fViewMatrix->mapRect(&devBounds, path.getBounds());
76 devBounds.roundOut(&devIBounds);
77 if (!devIBounds.intersect(*args.fClipConservativeBounds)) {
78 // Path is completely clipped away. Our code will eventually notice this before doing any
79 // real work.
80 return CanDrawPath::kYes;
81 }
82
83 if (devIBounds.height() * devIBounds.width() > 256 * 256) {
84 // Large paths can blow up the atlas fast. And they are not ideal for a two-pass rendering
85 // algorithm. Give the simpler direct renderers a chance before we commit to drawing it.
86 return CanDrawPath::kAsBackup;
87 }
88
89 if (args.fShape->hasUnstyledKey() && path.countVerbs() > 50) {
90 // Complex paths do better cached in an SDF, if the renderer will accept them.
91 return CanDrawPath::kAsBackup;
92 }
93
Chris Dalton5ed44232017-09-07 13:22:46 -060094 return CanDrawPath::kYes;
Chris Dalton1a325d22017-07-14 15:17:41 -060095}
96
97bool GrCoverageCountingPathRenderer::onDrawPath(const DrawPathArgs& args) {
98 SkASSERT(!fFlushing);
Chris Dalton1a325d22017-07-14 15:17:41 -060099
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600100 SkIRect clipIBounds;
101 GrRenderTargetContext* rtc = args.fRenderTargetContext;
102 args.fClip->getConservativeBounds(rtc->width(), rtc->height(), &clipIBounds, nullptr);
103
104 SkPath path;
105 args.fShape->asPath(&path);
Chris Dalton1a325d22017-07-14 15:17:41 -0600106
107 SkRect devBounds;
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600108 args.fViewMatrix->mapRect(&devBounds, path.getBounds());
109
Chris Daltona32a3c32017-12-05 10:05:21 -0700110 if (SkTMax(devBounds.height(), devBounds.width()) > kPathCropThreshold) {
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600111 // The path is too large. Crop it or analytic AA can run out of fp32 precision.
112 SkPath croppedPath;
113 path.transform(*args.fViewMatrix, &croppedPath);
114 crop_path(croppedPath, clipIBounds, &croppedPath);
115 auto op = skstd::make_unique<GrCCDrawPathsOp>(this, std::move(args.fPaint), clipIBounds,
116 SkMatrix::I(), croppedPath, path.getBounds());
117 rtc->addDrawOp(*args.fClip, std::move(op));
118 return true;
Chris Dalton1a325d22017-07-14 15:17:41 -0600119 }
120
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600121 auto op = skstd::make_unique<GrCCDrawPathsOp>(this, std::move(args.fPaint), clipIBounds,
122 *args.fViewMatrix, path, devBounds);
123 rtc->addDrawOp(*args.fClip, std::move(op));
Chris Dalton1a325d22017-07-14 15:17:41 -0600124 return true;
125}
126
// Returns a fragment processor that applies |deviceSpacePath| as a clip via a
// coverage-count atlas. Clip paths are deduplicated per opList by the path's
// generation ID; repeat lookups only widen the recorded access rect.
std::unique_ptr<GrFragmentProcessor> GrCoverageCountingPathRenderer::makeClipProcessor(
        GrProxyProvider* proxyProvider,
        uint32_t opListID, const SkPath& deviceSpacePath, const SkIRect& accessRect,
        int rtWidth, int rtHeight) {
    using MustCheckBounds = GrCCClipProcessor::MustCheckBounds;

    SkASSERT(!fFlushing);

    // operator[] on both maps default-constructs the entry on first use.
    GrCCClipPath& clipPath =
            fRTPendingPathsMap[opListID].fClipPaths[deviceSpacePath.getGenerationID()];
    if (!clipPath.isInitialized()) {
        // This ClipPath was just created during lookup. Initialize it.
        const SkRect& pathDevBounds = deviceSpacePath.getBounds();
        if (SkTMax(pathDevBounds.height(), pathDevBounds.width()) > kPathCropThreshold) {
            // The path is too large. Crop it or analytic AA can run out of fp32 precision.
            SkPath croppedPath;
            int maxRTSize = proxyProvider->caps()->maxRenderTargetSize();
            crop_path(deviceSpacePath, SkIRect::MakeWH(maxRTSize, maxRTSize), &croppedPath);
            clipPath.init(proxyProvider, croppedPath, accessRect, rtWidth, rtHeight);
        } else {
            clipPath.init(proxyProvider, deviceSpacePath, accessRect, rtWidth, rtHeight);
        }
    } else {
        // Already tracked this flush; just record the additional access region.
        clipPath.addAccess(accessRect);
    }

    // Only emit bounds checks in the shader when the access rect can reach
    // outside the clip path's device-space bounds.
    bool mustCheckBounds = !clipPath.pathDevIBounds().contains(accessRect);
    return skstd::make_unique<GrCCClipProcessor>(&clipPath, MustCheckBounds(mustCheckBounds),
                                                 deviceSpacePath.getFillType());
}
157
// Called by the on-flush resource provider before the opLists execute. Pass 1
// counts every pending draw/clip path so buffers can be preallocated; pass 2
// lays the paths out in atlas(es) and parses their geometry. On success,
// ownership of the per-flush GPU resources is retained until postFlush().
void GrCoverageCountingPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                              const uint32_t* opListIDs, int numOpListIDs,
                                              SkTArray<sk_sp<GrRenderTargetContext>>* atlasDraws) {
    SkASSERT(!fFlushing);
    SkASSERT(!fPerFlushResources);
    SkDEBUGCODE(fFlushing = true);

    if (fRTPendingPathsMap.empty()) {
        return;  // Nothing to draw.
    }

    // Count up the paths about to be flushed so we can preallocate buffers.
    int numPathDraws = 0;
    int numClipPaths = 0;
    GrCCPathParser::PathStats flushingPathStats;
    fFlushingRTPathIters.reserve(numOpListIDs);
    for (int i = 0; i < numOpListIDs; ++i) {
        auto iter = fRTPendingPathsMap.find(opListIDs[i]);
        if (fRTPendingPathsMap.end() == iter) {
            continue;  // No pending paths for this opList.
        }
        const GrCCRTPendingPaths& rtPendingPaths = iter->second;

        // Tally every SingleDraw hanging off every draw op for this render target.
        SkTInternalLList<GrCCDrawPathsOp>::Iter drawOpsIter;
        drawOpsIter.init(rtPendingPaths.fDrawOps,
                         SkTInternalLList<GrCCDrawPathsOp>::Iter::kHead_IterStart);
        while (GrCCDrawPathsOp* op = drawOpsIter.get()) {
            for (const GrCCDrawPathsOp::SingleDraw* draw = op->head(); draw; draw = draw->fNext) {
                ++numPathDraws;
                flushingPathStats.statPath(draw->fPath);
            }
            drawOpsIter.next();
        }

        numClipPaths += rtPendingPaths.fClipPaths.size();
        for (const auto& clipsIter : rtPendingPaths.fClipPaths) {
            flushingPathStats.statPath(clipsIter.second.deviceSpacePath());
        }

        // Remember the map entry so postFlush() can erase it after the GPU work
        // is recorded (Ops/FPs still point into it during flush).
        fFlushingRTPathIters.push_back(std::move(iter));
    }

    if (0 == numPathDraws + numClipPaths) {
        return;  // Nothing to draw.
    }

    auto resources = skstd::make_unique<GrCCPerFlushResources>(onFlushRP, numPathDraws,
                                                               numClipPaths, flushingPathStats);
    if (!resources->isMapped()) {
        return;  // Some allocation failed.
    }

    // Layout atlas(es) and parse paths.
    SkDEBUGCODE(int numSkippedPaths = 0);
    for (int i = 0; i < numOpListIDs; ++i) {
        auto it = fRTPendingPathsMap.find(opListIDs[i]);
        if (fRTPendingPathsMap.end() == it) {
            continue;
        }
        GrCCRTPendingPaths& rtPendingPaths = it->second;

        SkTInternalLList<GrCCDrawPathsOp>::Iter drawOpsIter;
        drawOpsIter.init(rtPendingPaths.fDrawOps,
                         SkTInternalLList<GrCCDrawPathsOp>::Iter::kHead_IterStart);
        while (GrCCDrawPathsOp* op = drawOpsIter.get()) {
            op->setupResources(resources.get(), onFlushRP);
            drawOpsIter.next();
            // Ops may skip degenerate instances; track them so the count check
            // below stays accurate in debug builds.
            SkDEBUGCODE(numSkippedPaths += op->numSkippedInstances_debugOnly());
        }

        for (auto& clipsIter : rtPendingPaths.fClipPaths) {
            clipsIter.second.placePathInAtlas(resources.get(), onFlushRP);
        }
    }
    SkASSERT(resources->pathInstanceCount() == numPathDraws - numSkippedPaths);

    // Allocate the atlases and create instance buffers to draw them.
    if (!resources->finalize(onFlushRP, atlasDraws)) {
        return;
    }

    fPerFlushResources = std::move(resources);
}
241
Chris Dalton3968ff92017-11-27 12:26:31 -0700242void GrCoverageCountingPathRenderer::postFlush(GrDeferredUploadToken, const uint32_t* opListIDs,
243 int numOpListIDs) {
Chris Dalton1a325d22017-07-14 15:17:41 -0600244 SkASSERT(fFlushing);
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600245 fPerFlushResources.reset();
Chris Daltona32a3c32017-12-05 10:05:21 -0700246 // We wait to erase these until after flush, once Ops and FPs are done accessing their data.
Chris Dalton3917b1e2018-05-09 00:40:52 -0600247 for (const auto& iter : fFlushingRTPathIters) {
248 fRTPendingPathsMap.erase(iter);
Chris Daltona32a3c32017-12-05 10:05:21 -0700249 }
Chris Dalton3917b1e2018-05-09 00:40:52 -0600250 fFlushingRTPathIters.reset();
Chris Dalton383a2ef2018-01-08 17:21:41 -0500251 SkDEBUGCODE(fFlushing = false);
Chris Dalton1a325d22017-07-14 15:17:41 -0600252}