/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCoverageCountingPathRenderer.h"

#include "GrCaps.h"
#include "GrClip.h"
#include "GrGpu.h"
#include "GrGpuCommandBuffer.h"
#include "GrOpFlushState.h"
#include "GrProxyProvider.h"
#include "GrRenderTargetOpList.h"
#include "GrStyle.h"
#include "GrTexture.h"
#include "SkMakeUnique.h"
#include "SkMatrix.h"
#include "SkPathOps.h"
#include "ccpr/GrCCClipProcessor.h"

// Shorthand for keeping line lengths under control with nested classes...
using CCPR = GrCoverageCountingPathRenderer;

// If a path spans more pixels than this, we need to crop it or else analytic AA can run out of fp32
// precision.
static constexpr float kPathCropThreshold = 1 << 16;

static void crop_path(const SkPath& path, const SkIRect& cropbox, SkPath* out) {
    SkPath cropPath;
    cropPath.addRect(SkRect::Make(cropbox));
    if (!Op(cropPath, path, kIntersect_SkPathOp, out)) {
        // This can fail if the PathOps encounter NaN or infinities.
        out->reset();
    }
}

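// CCPR renders analytic-AA coverage counts into offscreen alpha-half atlases, so it needs shader
// integer and flat-interpolation support, instanced attributes, mappable buffers, and a
// texturable/renderable kAlpha_half config, and it must not be blacklisted for the device.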
bool GrCoverageCountingPathRenderer::IsSupported(const GrCaps& caps) {
    const GrShaderCaps& shaderCaps = *caps.shaderCaps();
    return shaderCaps.integerSupport() && shaderCaps.flatInterpolationSupport() &&
           caps.instanceAttribSupport() && GrCaps::kNone_MapFlags != caps.mapBufferFlags() &&
           caps.isConfigTexturable(kAlpha_half_GrPixelConfig) &&
           caps.isConfigRenderable(kAlpha_half_GrPixelConfig, /*withMSAA=*/false) &&
           !caps.blacklistCoverageCounting();
}

sk_sp<GrCoverageCountingPathRenderer> GrCoverageCountingPathRenderer::CreateIfSupported(
        const GrCaps& caps, bool drawCachablePaths) {
    auto ccpr = IsSupported(caps) ? new GrCoverageCountingPathRenderer(drawCachablePaths) : nullptr;
    return sk_sp<GrCoverageCountingPathRenderer>(ccpr);
}

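// CCPR only accepts non-inverse simple fills with coverage AA, no perspective, and no conics.
// Large or complex paths are offered back as a backup so simpler direct renderers get first pick.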
GrPathRenderer::CanDrawPath GrCoverageCountingPathRenderer::onCanDrawPath(
        const CanDrawPathArgs& args) const {
    if (args.fShape->hasUnstyledKey() && !fDrawCachablePaths) {
        return CanDrawPath::kNo;
    }

    if (!args.fShape->style().isSimpleFill() || args.fShape->inverseFilled() ||
        args.fViewMatrix->hasPerspective() || GrAAType::kCoverage != args.fAAType) {
        return CanDrawPath::kNo;
    }

    SkPath path;
    args.fShape->asPath(&path);
    if (SkPathPriv::ConicWeightCnt(path)) {
        return CanDrawPath::kNo;
    }

    SkRect devBounds;
    SkIRect devIBounds;
    args.fViewMatrix->mapRect(&devBounds, path.getBounds());
    devBounds.roundOut(&devIBounds);
    if (!devIBounds.intersect(*args.fClipConservativeBounds)) {
        // Path is completely clipped away. Our code will eventually notice this before doing any
        // real work.
        return CanDrawPath::kYes;
    }

    if (devIBounds.height() * devIBounds.width() > 256 * 256) {
        // Large paths can blow up the atlas fast. And they are not ideal for a two-pass rendering
        // algorithm. Give the simpler direct renderers a chance before we commit to drawing it.
        return CanDrawPath::kAsBackup;
    }

    if (args.fShape->hasUnstyledKey() && path.countVerbs() > 50) {
        // Complex paths do better cached in an SDF, if the renderer will accept them.
        return CanDrawPath::kAsBackup;
    }

    return CanDrawPath::kYes;
}

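// No rendering happens here. Each draw is recorded as a DrawPathsOp; the coverage counts and
// atlases are built later, during preFlush().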
bool GrCoverageCountingPathRenderer::onDrawPath(const DrawPathArgs& args) {
    SkASSERT(!fFlushing);
    auto op = skstd::make_unique<DrawPathsOp>(this, args, args.fPaint.getColor());
    args.fRenderTargetContext->addDrawOp(*args.fClip, std::move(op));
    return true;
}

CCPR::DrawPathsOp::DrawPathsOp(GrCoverageCountingPathRenderer* ccpr, const DrawPathArgs& args,
                               GrColor color)
        : INHERITED(ClassID())
        , fCCPR(ccpr)
        , fSRGBFlags(GrPipeline::SRGBFlagsFromPaint(args.fPaint))
        , fProcessors(std::move(args.fPaint))
        , fTailDraw(&fHeadDraw)
        , fOwningRTPendingPaths(nullptr) {
    SkDEBUGCODE(++fCCPR->fPendingDrawOpsCount);
    SkDEBUGCODE(fBaseInstance = -1);
    SkDEBUGCODE(fInstanceCount = 1);
    SkDEBUGCODE(fNumSkippedInstances = 0);
    GrRenderTargetContext* const rtc = args.fRenderTargetContext;

    SkRect devBounds;
    args.fViewMatrix->mapRect(&devBounds, args.fShape->bounds());
    args.fClip->getConservativeBounds(rtc->width(), rtc->height(), &fHeadDraw.fClipIBounds,
                                      nullptr);
    if (SkTMax(devBounds.height(), devBounds.width()) > kPathCropThreshold) {
        // The path is too large. We need to crop it or analytic AA can run out of fp32 precision.
        SkPath path;
        args.fShape->asPath(&path);
        path.transform(*args.fViewMatrix);
        fHeadDraw.fMatrix.setIdentity();
        crop_path(path, fHeadDraw.fClipIBounds, &fHeadDraw.fPath);
        devBounds = fHeadDraw.fPath.getBounds();
    } else {
        fHeadDraw.fMatrix = *args.fViewMatrix;
        args.fShape->asPath(&fHeadDraw.fPath);
    }
    fHeadDraw.fColor = color;  // Can't call args.fPaint.getColor() because it has been std::move'd.

    // FIXME: intersect with clip bounds to (hopefully) improve batching.
    // (This is nontrivial due to assumptions in generating the octagon cover geometry.)
    this->setBounds(devBounds, GrOp::HasAABloat::kYes, GrOp::IsZeroArea::kNo);
}

CCPR::DrawPathsOp::~DrawPathsOp() {
    if (fOwningRTPendingPaths) {
        // Remove CCPR's dangling pointer to this Op before deleting it.
        fOwningRTPendingPaths->fDrawOps.remove(this);
    }
    SkDEBUGCODE(--fCCPR->fPendingDrawOpsCount);
}

GrDrawOp::RequiresDstTexture CCPR::DrawPathsOp::finalize(const GrCaps& caps,
                                                         const GrAppliedClip* clip,
                                                         GrPixelConfigIsClamped dstIsClamped) {
    SkASSERT(!fCCPR->fFlushing);
    // There should only be a single path draw in this Op right now.
    SkASSERT(1 == fInstanceCount);
    SkASSERT(&fHeadDraw == fTailDraw);
    GrProcessorSet::Analysis analysis =
            fProcessors.finalize(fHeadDraw.fColor, GrProcessorAnalysisCoverage::kSingleChannel,
                                 clip, false, caps, dstIsClamped, &fHeadDraw.fColor);
    return analysis.requiresDstTexture() ? RequiresDstTexture::kYes : RequiresDstTexture::kNo;
}

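// Ops can merge only when they share a fill type, sRGB flags, and processor set. Merging copies
// the other op's head draw into this render target's draw allocator (so it outlives that op) and
// splices the remainder of its SingleDraw list onto this op's tail.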
bool CCPR::DrawPathsOp::onCombineIfPossible(GrOp* op, const GrCaps& caps) {
    DrawPathsOp* that = op->cast<DrawPathsOp>();
    SkASSERT(fCCPR == that->fCCPR);
    SkASSERT(!fCCPR->fFlushing);
    SkASSERT(fOwningRTPendingPaths);
    SkASSERT(fInstanceCount);
    SkASSERT(!that->fOwningRTPendingPaths || that->fOwningRTPendingPaths == fOwningRTPendingPaths);
    SkASSERT(that->fInstanceCount);

    if (this->getFillType() != that->getFillType() || fSRGBFlags != that->fSRGBFlags ||
        fProcessors != that->fProcessors) {
        return false;
    }

    fTailDraw->fNext = &fOwningRTPendingPaths->fDrawsAllocator.push_back(that->fHeadDraw);
    fTailDraw = (that->fTailDraw == &that->fHeadDraw) ? fTailDraw->fNext : that->fTailDraw;

    this->joinBounds(*that);

    SkDEBUGCODE(fInstanceCount += that->fInstanceCount);
    SkDEBUGCODE(that->fInstanceCount = 0);
    return true;
}

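// Once the op list records this op, register it in the pending-paths map for its render target so
// preFlush() can find every path that will need atlas space.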
void CCPR::DrawPathsOp::wasRecorded(GrRenderTargetOpList* opList) {
    SkASSERT(!fCCPR->fFlushing);
    SkASSERT(!fOwningRTPendingPaths);
    fOwningRTPendingPaths = &fCCPR->fRTPendingPathsMap[opList->uniqueID()];
    fOwningRTPendingPaths->fDrawOps.addToTail(this);
}

bool GrCoverageCountingPathRenderer::canMakeClipProcessor(const SkPath& deviceSpacePath) const {
    if (!fDrawCachablePaths && !deviceSpacePath.isVolatile()) {
        return false;
    }

    if (SkPathPriv::ConicWeightCnt(deviceSpacePath)) {
        return false;
    }

    return true;
}

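// Clip paths are deduplicated by SkPath generation ID within each op list, so repeated clips
// against the same path share one atlas entry; later lookups just union in a new access rect.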
std::unique_ptr<GrFragmentProcessor> GrCoverageCountingPathRenderer::makeClipProcessor(
        GrProxyProvider* proxyProvider,
        uint32_t opListID, const SkPath& deviceSpacePath, const SkIRect& accessRect,
        int rtWidth, int rtHeight) {
    using MustCheckBounds = GrCCClipProcessor::MustCheckBounds;

    SkASSERT(!fFlushing);
    SkASSERT(this->canMakeClipProcessor(deviceSpacePath));

    ClipPath& clipPath = fRTPendingPathsMap[opListID].fClipPaths[deviceSpacePath.getGenerationID()];
    if (clipPath.isUninitialized()) {
        // This ClipPath was just created during lookup. Initialize it.
        clipPath.init(proxyProvider, deviceSpacePath, accessRect, rtWidth, rtHeight);
    } else {
        clipPath.addAccess(accessRect);
    }

    bool mustCheckBounds = !clipPath.pathDevIBounds().contains(accessRect);
    return skstd::make_unique<GrCCClipProcessor>(&clipPath, MustCheckBounds(mustCheckBounds),
                                                 deviceSpacePath.getFillType());
}

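// The clip FP is created before any atlas exists, so its texture is wired up through a fully lazy
// proxy. The lambda below runs at instantiation time, once the atlas has been assigned, and also
// resolves the scale/translate that map device space into normalized atlas space.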
void CCPR::ClipPath::init(GrProxyProvider* proxyProvider,
                          const SkPath& deviceSpacePath, const SkIRect& accessRect,
                          int rtWidth, int rtHeight) {
    SkASSERT(this->isUninitialized());

    fAtlasLazyProxy = proxyProvider->createFullyLazyProxy(
            [this](GrResourceProvider* resourceProvider, GrSurfaceOrigin* outOrigin) {
                if (!resourceProvider) {
                    return sk_sp<GrTexture>();
                }
                SkASSERT(fHasAtlas);
                SkASSERT(!fHasAtlasTransform);

                GrTextureProxy* textureProxy = fAtlas ? fAtlas->textureProxy() : nullptr;
                if (!textureProxy || !textureProxy->instantiate(resourceProvider)) {
                    fAtlasScale = fAtlasTranslate = {0, 0};
                    SkDEBUGCODE(fHasAtlasTransform = true);
                    return sk_sp<GrTexture>();
                }

                fAtlasScale = {1.f / textureProxy->width(), 1.f / textureProxy->height()};
                fAtlasTranslate = {fAtlasOffsetX * fAtlasScale.x(),
                                   fAtlasOffsetY * fAtlasScale.y()};
                if (kBottomLeft_GrSurfaceOrigin == textureProxy->origin()) {
                    fAtlasScale.fY = -fAtlasScale.y();
                    fAtlasTranslate.fY = 1 - fAtlasTranslate.y();
                }
                SkDEBUGCODE(fHasAtlasTransform = true);

                *outOrigin = textureProxy->origin();
                return sk_ref_sp(textureProxy->priv().peekTexture());
            },
            GrProxyProvider::Renderable::kYes, kAlpha_half_GrPixelConfig);

    const SkRect& pathDevBounds = deviceSpacePath.getBounds();
    if (SkTMax(pathDevBounds.height(), pathDevBounds.width()) > kPathCropThreshold) {
        // The path is too large. We need to crop it or analytic AA can run out of fp32 precision.
        crop_path(deviceSpacePath, SkIRect::MakeWH(rtWidth, rtHeight), &fDeviceSpacePath);
    } else {
        fDeviceSpacePath = deviceSpacePath;
    }
    deviceSpacePath.getBounds().roundOut(&fPathDevIBounds);
    fAccessRect = accessRect;
}

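// preFlush proceeds in four stages: (1) count the pending paths and their points/verbs so the
// per-flush buffers can be sized up front, (2) allocate the index/vertex/instance GPU buffers,
// (3) parse every path and pack it into an atlas while writing its instance data, and (4) finalize
// the parser and hand the atlas render target contexts back to the caller to be drawn.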
void GrCoverageCountingPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                              const uint32_t* opListIDs, int numOpListIDs,
                                              SkTArray<sk_sp<GrRenderTargetContext>>* results) {
    using PathInstance = GrCCPathProcessor::Instance;

    SkASSERT(!fFlushing);
    SkASSERT(!fPerFlushIndexBuffer);
    SkASSERT(!fPerFlushVertexBuffer);
    SkASSERT(!fPerFlushInstanceBuffer);
    SkASSERT(!fPerFlushPathParser);
    SkASSERT(fPerFlushAtlases.empty());
    SkDEBUGCODE(fFlushing = true);

    if (fRTPendingPathsMap.empty()) {
        return;  // Nothing to draw.
    }

    fPerFlushResourcesAreValid = false;

    // Count the paths that are being flushed.
    int maxTotalPaths = 0, maxPathPoints = 0, numSkPoints = 0, numSkVerbs = 0;
    SkDEBUGCODE(int numClipPaths = 0);
    for (int i = 0; i < numOpListIDs; ++i) {
        auto it = fRTPendingPathsMap.find(opListIDs[i]);
        if (fRTPendingPathsMap.end() == it) {
            continue;
        }
        const RTPendingPaths& rtPendingPaths = it->second;

        SkTInternalLList<DrawPathsOp>::Iter drawOpsIter;
        drawOpsIter.init(rtPendingPaths.fDrawOps,
                         SkTInternalLList<DrawPathsOp>::Iter::kHead_IterStart);
        while (DrawPathsOp* op = drawOpsIter.get()) {
            for (const DrawPathsOp::SingleDraw* draw = op->head(); draw; draw = draw->fNext) {
                ++maxTotalPaths;
                maxPathPoints = SkTMax(draw->fPath.countPoints(), maxPathPoints);
                numSkPoints += draw->fPath.countPoints();
                numSkVerbs += draw->fPath.countVerbs();
            }
            drawOpsIter.next();
        }

        maxTotalPaths += rtPendingPaths.fClipPaths.size();
        SkDEBUGCODE(numClipPaths += rtPendingPaths.fClipPaths.size());
        for (const auto& clipsIter : rtPendingPaths.fClipPaths) {
            const SkPath& path = clipsIter.second.deviceSpacePath();
            maxPathPoints = SkTMax(path.countPoints(), maxPathPoints);
            numSkPoints += path.countPoints();
            numSkVerbs += path.countVerbs();
        }
    }

    if (!maxTotalPaths) {
        return;  // Nothing to draw.
    }

    // Allocate GPU buffers.
    fPerFlushIndexBuffer = GrCCPathProcessor::FindIndexBuffer(onFlushRP);
    if (!fPerFlushIndexBuffer) {
        SkDebugf("WARNING: failed to allocate ccpr path index buffer.\n");
        return;
    }

    fPerFlushVertexBuffer = GrCCPathProcessor::FindVertexBuffer(onFlushRP);
    if (!fPerFlushVertexBuffer) {
        SkDebugf("WARNING: failed to allocate ccpr path vertex buffer.\n");
        return;
    }

    fPerFlushInstanceBuffer =
            onFlushRP->makeBuffer(kVertex_GrBufferType, maxTotalPaths * sizeof(PathInstance));
    if (!fPerFlushInstanceBuffer) {
        SkDebugf("WARNING: failed to allocate path instance buffer. No paths will be drawn.\n");
        return;
    }

    PathInstance* pathInstanceData = static_cast<PathInstance*>(fPerFlushInstanceBuffer->map());
    SkASSERT(pathInstanceData);
    int pathInstanceIdx = 0;

    fPerFlushPathParser = sk_make_sp<GrCCPathParser>(maxTotalPaths, maxPathPoints, numSkPoints,
                                                     numSkVerbs);
    SkDEBUGCODE(int skippedTotalPaths = 0);

    // Allocate atlas(es) and fill out GPU instance buffers.
    for (int i = 0; i < numOpListIDs; ++i) {
        auto it = fRTPendingPathsMap.find(opListIDs[i]);
        if (fRTPendingPathsMap.end() == it) {
            continue;
        }
        RTPendingPaths& rtPendingPaths = it->second;

        SkTInternalLList<DrawPathsOp>::Iter drawOpsIter;
        drawOpsIter.init(rtPendingPaths.fDrawOps,
                         SkTInternalLList<DrawPathsOp>::Iter::kHead_IterStart);
        while (DrawPathsOp* op = drawOpsIter.get()) {
            pathInstanceIdx = op->setupResources(onFlushRP, pathInstanceData, pathInstanceIdx);
            drawOpsIter.next();
            SkDEBUGCODE(skippedTotalPaths += op->numSkippedInstances_debugOnly());
        }

        for (auto& clipsIter : rtPendingPaths.fClipPaths) {
            clipsIter.second.placePathInAtlas(this, onFlushRP, fPerFlushPathParser.get());
        }
    }

    fPerFlushInstanceBuffer->unmap();

    SkASSERT(pathInstanceIdx == maxTotalPaths - skippedTotalPaths - numClipPaths);

    if (!fPerFlushAtlases.empty()) {
        auto coverageCountBatchID = fPerFlushPathParser->closeCurrentBatch();
        fPerFlushAtlases.back().setCoverageCountBatchID(coverageCountBatchID);
    }

    if (!fPerFlushPathParser->finalize(onFlushRP)) {
        SkDebugf("WARNING: failed to allocate GPU buffers for CCPR. No paths will be drawn.\n");
        return;
    }

    // Draw the atlas(es).
    GrTAllocator<GrCCAtlas>::Iter atlasIter(&fPerFlushAtlases);
    while (atlasIter.next()) {
        if (auto rtc = atlasIter.get()->finalize(onFlushRP, fPerFlushPathParser)) {
            results->push_back(std::move(rtc));
        }
    }

    fPerFlushResourcesAreValid = true;
}

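// Walks this op's SingleDraw list: each path is parsed into coverage-count geometry, placed in an
// atlas, and given an instance record. A new AtlasBatch is closed out whenever consecutive draws
// land in different atlases, so onExecute() can issue one instanced draw per atlas.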
int CCPR::DrawPathsOp::setupResources(GrOnFlushResourceProvider* onFlushRP,
                                      GrCCPathProcessor::Instance* pathInstanceData,
                                      int pathInstanceIdx) {
    GrCCPathParser* parser = fCCPR->fPerFlushPathParser.get();
    const GrCCAtlas* currentAtlas = nullptr;
    SkASSERT(fInstanceCount > 0);
    SkASSERT(-1 == fBaseInstance);
    fBaseInstance = pathInstanceIdx;

    for (const SingleDraw* draw = this->head(); draw; draw = draw->fNext) {
        // parsePath gives us two tight bounding boxes: one in device space, as well as a second
        // one rotated an additional 45 degrees. The path vertex shader uses these two bounding
        // boxes to generate an octagon that circumscribes the path.
        SkRect devBounds, devBounds45;
        parser->parsePath(draw->fMatrix, draw->fPath, &devBounds, &devBounds45);

        SkIRect devIBounds;
        devBounds.roundOut(&devIBounds);

        int16_t offsetX, offsetY;
        GrCCAtlas* atlas = fCCPR->placeParsedPathInAtlas(onFlushRP, draw->fClipIBounds, devIBounds,
                                                         &offsetX, &offsetY);
        if (!atlas) {
            SkDEBUGCODE(++fNumSkippedInstances);
            continue;
        }
        if (currentAtlas != atlas) {
            if (currentAtlas) {
                this->addAtlasBatch(currentAtlas, pathInstanceIdx);
            }
            currentAtlas = atlas;
        }

        const SkMatrix& m = draw->fMatrix;
        pathInstanceData[pathInstanceIdx++] = {
                devBounds,
                devBounds45,
                {{m.getScaleX(), m.getSkewY(), m.getSkewX(), m.getScaleY()}},
                {{m.getTranslateX(), m.getTranslateY()}},
                {{offsetX, offsetY}},
                draw->fColor};
    }

    SkASSERT(pathInstanceIdx == fBaseInstance + fInstanceCount - fNumSkippedInstances);
    if (currentAtlas) {
        this->addAtlasBatch(currentAtlas, pathInstanceIdx);
    }

    return pathInstanceIdx;
}

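// Clip paths were already transformed to device space in init(), so they skip the matrix step and
// go straight to the parser's device-space entry point.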
void CCPR::ClipPath::placePathInAtlas(GrCoverageCountingPathRenderer* ccpr,
                                      GrOnFlushResourceProvider* onFlushRP,
                                      GrCCPathParser* parser) {
    SkASSERT(!this->isUninitialized());
    SkASSERT(!fHasAtlas);
    parser->parseDeviceSpacePath(fDeviceSpacePath);
    fAtlas = ccpr->placeParsedPathInAtlas(onFlushRP, fAccessRect, fPathDevIBounds, &fAtlasOffsetX,
                                          &fAtlasOffsetY);
    SkDEBUGCODE(fHasAtlas = true);
}

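// Decides whether the parsed path needs a scissor (its bounds exceed the clip), finds room for it
// in the current atlas, and rolls over to a new atlas when the current one is full. Returns null,
// discarding the parsed path, if the path is clipped out entirely.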
GrCCAtlas* GrCoverageCountingPathRenderer::placeParsedPathInAtlas(
        GrOnFlushResourceProvider* onFlushRP,
        const SkIRect& clipIBounds,
        const SkIRect& pathIBounds,
        int16_t* atlasOffsetX,
        int16_t* atlasOffsetY) {
    using ScissorMode = GrCCPathParser::ScissorMode;

    ScissorMode scissorMode;
    SkIRect clippedPathIBounds;
    if (clipIBounds.contains(pathIBounds)) {
        clippedPathIBounds = pathIBounds;
        scissorMode = ScissorMode::kNonScissored;
    } else if (clippedPathIBounds.intersect(clipIBounds, pathIBounds)) {
        scissorMode = ScissorMode::kScissored;
    } else {
        fPerFlushPathParser->discardParsedPath();
        return nullptr;
    }

    SkIPoint16 atlasLocation;
    int h = clippedPathIBounds.height(), w = clippedPathIBounds.width();
    if (fPerFlushAtlases.empty() || !fPerFlushAtlases.back().addRect(w, h, &atlasLocation)) {
        if (!fPerFlushAtlases.empty()) {
            // The atlas is out of room and can't grow any bigger.
            auto coverageCountBatchID = fPerFlushPathParser->closeCurrentBatch();
            fPerFlushAtlases.back().setCoverageCountBatchID(coverageCountBatchID);
        }
        fPerFlushAtlases.emplace_back(*onFlushRP->caps(), w, h).addRect(w, h, &atlasLocation);
    }

    *atlasOffsetX = atlasLocation.x() - static_cast<int16_t>(clippedPathIBounds.left());
    *atlasOffsetY = atlasLocation.y() - static_cast<int16_t>(clippedPathIBounds.top());
    fPerFlushPathParser->saveParsedPath(scissorMode, clippedPathIBounds, *atlasOffsetX,
                                        *atlasOffsetY);

    return &fPerFlushAtlases.back();
}

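// By the time this op executes, the atlas render target contexts handed back from preFlush()
// should already have been drawn, so the atlases hold finished coverage counts. This op then
// issues one instanced mesh draw per atlas batch, sampling coverage from the atlas texture.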
void CCPR::DrawPathsOp::onExecute(GrOpFlushState* flushState) {
    SkASSERT(fCCPR->fFlushing);
    SkASSERT(flushState->rtCommandBuffer());

    if (!fCCPR->fPerFlushResourcesAreValid) {
        return;  // Setup failed.
    }

    SkASSERT(fBaseInstance >= 0);  // Make sure setupResources has been called.

    GrPipeline::InitArgs initArgs;
    initArgs.fFlags = fSRGBFlags;
    initArgs.fProxy = flushState->drawOpArgs().fProxy;
    initArgs.fCaps = &flushState->caps();
    initArgs.fResourceProvider = flushState->resourceProvider();
    initArgs.fDstProxy = flushState->drawOpArgs().fDstProxy;
    GrPipeline pipeline(initArgs, std::move(fProcessors), flushState->detachAppliedClip());

    int baseInstance = fBaseInstance;

    for (int i = 0; i < fAtlasBatches.count(); baseInstance = fAtlasBatches[i++].fEndInstanceIdx) {
        const AtlasBatch& batch = fAtlasBatches[i];
        SkASSERT(batch.fEndInstanceIdx > baseInstance);

        if (!batch.fAtlas->textureProxy()) {
            continue;  // Atlas failed to allocate.
        }

        GrCCPathProcessor pathProc(flushState->resourceProvider(),
                                   sk_ref_sp(batch.fAtlas->textureProxy()), this->getFillType());

        GrMesh mesh(GrCCPathProcessor::MeshPrimitiveType(flushState->caps()));
        mesh.setIndexedInstanced(fCCPR->fPerFlushIndexBuffer.get(),
                                 GrCCPathProcessor::NumIndicesPerInstance(flushState->caps()),
                                 fCCPR->fPerFlushInstanceBuffer.get(),
                                 batch.fEndInstanceIdx - baseInstance, baseInstance);
        mesh.setVertexData(fCCPR->fPerFlushVertexBuffer.get());

        flushState->rtCommandBuffer()->draw(pipeline, pathProc, &mesh, nullptr, 1, this->bounds());
    }

    SkASSERT(baseInstance == fBaseInstance + fInstanceCount - fNumSkippedInstances);
}

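// Called once the flush has executed; releases all per-flush GPU resources and clears out the
// pending-paths entries for the op lists that just flushed.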
void GrCoverageCountingPathRenderer::postFlush(GrDeferredUploadToken, const uint32_t* opListIDs,
                                               int numOpListIDs) {
    SkASSERT(fFlushing);
    fPerFlushAtlases.reset();
    fPerFlushPathParser.reset();
    fPerFlushInstanceBuffer.reset();
    fPerFlushVertexBuffer.reset();
    fPerFlushIndexBuffer.reset();
    // We wait to erase these until after flush, once Ops and FPs are done accessing their data.
    for (int i = 0; i < numOpListIDs; ++i) {
        fRTPendingPathsMap.erase(opListIDs[i]);
    }
    SkDEBUGCODE(fFlushing = false);
}