/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCDrawPathsOp.h"

#include "GrContext.h"
#include "GrContextPriv.h"
#include "GrMemoryPool.h"
#include "GrOpFlushState.h"
#include "ccpr/GrCCPathCache.h"
#include "ccpr/GrCCPerFlushResources.h"
#include "ccpr/GrCoverageCountingPathRenderer.h"

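// Returns true if any fragment processor in the paint reads local coordinates. When none do,
// the op can substitute the identity matrix for local coords (fViewMatrixIfUsingLocalCoords),
// which lets draws with different view matrices combine in onCombineIfPossible().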
static bool has_coord_transforms(const GrPaint& paint) {
    GrFragmentProcessor::Iter iter(paint);
    while (const GrFragmentProcessor* fp = iter.next()) {
        if (!fp->coordTransforms().empty()) {
            return true;
        }
    }
    return false;
}

static int64_t area(const SkIRect& r) {
    return sk_64_mul(r.height(), r.width());
}

std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::Make(
        GrContext* context, const SkIRect& clipIBounds, const SkMatrix& m, const GrShape& shape,
        GrPaint&& paint) {
    SkRect conservativeDevBounds;
    m.mapRect(&conservativeDevBounds, shape.bounds());

    const SkStrokeRec& stroke = shape.style().strokeRec();
    float strokeDevWidth = 0;
    float conservativeInflationRadius = 0;
    if (!stroke.isFillStyle()) {
        strokeDevWidth = GrCoverageCountingPathRenderer::GetStrokeDevWidth(
                m, stroke, &conservativeInflationRadius);
        conservativeDevBounds.outset(conservativeInflationRadius, conservativeInflationRadius);
    }

    std::unique_ptr<GrCCDrawPathsOp> op;
    float conservativeSize = SkTMax(conservativeDevBounds.height(), conservativeDevBounds.width());
    if (conservativeSize > GrCoverageCountingPathRenderer::kPathCropThreshold) {
        // The path is too large. Crop it, or analytic AA can run out of fp32 precision.
        SkPath croppedDevPath;
        shape.asPath(&croppedDevPath);
        croppedDevPath.transform(m, &croppedDevPath);

        SkIRect cropBox = clipIBounds;
        GrShape croppedDevShape;
        if (stroke.isFillStyle()) {
            GrCoverageCountingPathRenderer::CropPath(croppedDevPath, cropBox, &croppedDevPath);
            croppedDevShape = GrShape(croppedDevPath);
            conservativeDevBounds = croppedDevShape.bounds();
        } else {
            int r = SkScalarCeilToInt(conservativeInflationRadius);
            cropBox.outset(r, r);
            GrCoverageCountingPathRenderer::CropPath(croppedDevPath, cropBox, &croppedDevPath);
            SkStrokeRec devStroke = stroke;
            devStroke.setStrokeStyle(strokeDevWidth);
            croppedDevShape = GrShape(croppedDevPath, GrStyle(devStroke, nullptr));
            conservativeDevBounds = croppedDevPath.getBounds();
            conservativeDevBounds.outset(conservativeInflationRadius, conservativeInflationRadius);
        }

        // FIXME: This breaks local coords: http://skbug.com/8003
        return InternalMake(context, clipIBounds, SkMatrix::I(), croppedDevShape, strokeDevWidth,
                            conservativeDevBounds, std::move(paint));
    }

    return InternalMake(context, clipIBounds, m, shape, strokeDevWidth, conservativeDevBounds,
                        std::move(paint));
}

std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::InternalMake(
        GrContext* context, const SkIRect& clipIBounds, const SkMatrix& m, const GrShape& shape,
        float strokeDevWidth, const SkRect& conservativeDevBounds, GrPaint&& paint) {
    // The path itself should have been cropped if larger than kPathCropThreshold. If it had a
    // stroke, that would have further inflated its draw bounds.
    SkASSERT(SkTMax(conservativeDevBounds.height(), conservativeDevBounds.width()) <
             GrCoverageCountingPathRenderer::kPathCropThreshold +
             GrCoverageCountingPathRenderer::kMaxBoundsInflationFromStroke*2 + 1);

    SkIRect shapeConservativeIBounds;
    conservativeDevBounds.roundOut(&shapeConservativeIBounds);

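    // Determine how much of the path's mask will be visible within the clip. Visibility later
    // decides whether it is worth rendering (and possibly caching) the entire mask, or only the
    // clipped portion.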
    SkIRect maskDevIBounds;
    Visibility maskVisibility;
    if (clipIBounds.contains(shapeConservativeIBounds)) {
        maskDevIBounds = shapeConservativeIBounds;
        maskVisibility = Visibility::kComplete;
    } else {
        if (!maskDevIBounds.intersect(clipIBounds, shapeConservativeIBounds)) {
            return nullptr;
        }
        int64_t unclippedArea = area(shapeConservativeIBounds);
        int64_t clippedArea = area(maskDevIBounds);
        maskVisibility = (clippedArea >= unclippedArea/2 || unclippedArea < 100*100)
                ? Visibility::kMostlyComplete  // i.e., visible enough to justify rendering the
                                               // whole thing if we think we can cache it.
                : Visibility::kPartial;
    }

    GrOpMemoryPool* pool = context->contextPriv().opMemoryPool();

    return pool->allocate<GrCCDrawPathsOp>(m, shape, strokeDevWidth, shapeConservativeIBounds,
                                           maskDevIBounds, maskVisibility, conservativeDevBounds,
                                           std::move(paint));
}

GrCCDrawPathsOp::GrCCDrawPathsOp(const SkMatrix& m, const GrShape& shape, float strokeDevWidth,
                                 const SkIRect& shapeConservativeIBounds,
                                 const SkIRect& maskDevIBounds, Visibility maskVisibility,
                                 const SkRect& conservativeDevBounds, GrPaint&& paint)
        : GrDrawOp(ClassID())
        , fViewMatrixIfUsingLocalCoords(has_coord_transforms(paint) ? m : SkMatrix::I())
        , fDraws(m, shape, strokeDevWidth, shapeConservativeIBounds, maskDevIBounds, maskVisibility,
                 paint.getColor4f())
        , fProcessors(std::move(paint)) {  // Paint must be moved after fetching its color above.
    SkDEBUGCODE(fBaseInstance = -1);
    // FIXME: intersect with clip bounds to (hopefully) improve batching.
    // (This is nontrivial due to assumptions in generating the octagon cover geometry.)
    this->setBounds(conservativeDevBounds, GrOp::HasAABloat::kYes, GrOp::IsZeroArea::kNo);
}

GrCCDrawPathsOp::~GrCCDrawPathsOp() {
    if (fOwningPerOpListPaths) {
        // Remove the list's dangling pointer to this Op before deleting it.
        fOwningPerOpListPaths->fDrawOps.remove(this);
    }
}

GrCCDrawPathsOp::SingleDraw::SingleDraw(const SkMatrix& m, const GrShape& shape,
                                        float strokeDevWidth,
                                        const SkIRect& shapeConservativeIBounds,
                                        const SkIRect& maskDevIBounds, Visibility maskVisibility,
                                        const SkPMColor4f& color)
        : fMatrix(m)
        , fShape(shape)
        , fStrokeDevWidth(strokeDevWidth)
        , fShapeConservativeIBounds(shapeConservativeIBounds)
        , fMaskDevIBounds(maskDevIBounds)
        , fMaskVisibility(maskVisibility)
        , fColor(color) {
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    if (fShape.hasUnstyledKey()) {
        // On AOSP we round view matrix translates to integer values for cacheable paths. We do
        // this to match HWUI's cache hit ratio, which doesn't consider the matrix when caching
        // paths.
        fMatrix.setTranslateX(SkScalarRoundToScalar(fMatrix.getTranslateX()));
        fMatrix.setTranslateY(SkScalarRoundToScalar(fMatrix.getTranslateY()));
    }
#endif
}

GrDrawOp::RequiresDstTexture GrCCDrawPathsOp::finalize(const GrCaps& caps,
                                                       const GrAppliedClip* clip) {
    SkASSERT(1 == fNumDraws);  // There should only be one single path draw in this Op right now.
    SingleDraw* draw = &fDraws.head();

    const GrProcessorSet::Analysis& analysis = fProcessors.finalize(
            draw->fColor, GrProcessorAnalysisCoverage::kSingleChannel, clip, false, caps,
            &draw->fColor);

    // Lines start looking jagged when they get thinner than 1px. For thin strokes it looks better
    // if we can convert them to hairlines (i.e., inflate the stroke width to 1px), and instead
    // reduce the opacity to create the illusion of thinness. This strategy also helps reduce
    // artifacts from coverage dilation when there are self intersections.
    if (analysis.isCompatibleWithCoverageAsAlpha() &&
        !draw->fShape.style().strokeRec().isFillStyle() && draw->fStrokeDevWidth < 1) {
        // Modifying the shape affects its cache key. The draw can't have a cache entry yet or
        // else our next step would invalidate it.
        SkASSERT(!draw->fCacheEntry);
        SkASSERT(SkStrokeRec::kStroke_Style == draw->fShape.style().strokeRec().getStyle());

        SkPath path;
        draw->fShape.asPath(&path);

        // Create a hairline version of our stroke.
        SkStrokeRec hairlineStroke = draw->fShape.style().strokeRec();
        hairlineStroke.setStrokeStyle(0);

        // How transparent does a 1px stroke have to be in order to appear as thin as the real one?
        float coverage = draw->fStrokeDevWidth;

        draw->fShape = GrShape(path, GrStyle(hairlineStroke, nullptr));
        draw->fStrokeDevWidth = 1;

        // TODO4F: Preserve float colors.
        // fShapeConservativeIBounds already accounted for this possibility of inflating the stroke.
        draw->fColor = draw->fColor * coverage;
    }

    return RequiresDstTexture(analysis.requiresDstTexture());
}

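// Two ops can only merge if they will render with the same pipeline: their processor sets and
// local-coord matrices must match. Merging moves "that" op's draw list onto this op's list.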
GrOp::CombineResult GrCCDrawPathsOp::onCombineIfPossible(GrOp* op, const GrCaps&) {
    GrCCDrawPathsOp* that = op->cast<GrCCDrawPathsOp>();
    SkASSERT(fOwningPerOpListPaths);
    SkASSERT(fNumDraws);
    SkASSERT(!that->fOwningPerOpListPaths || that->fOwningPerOpListPaths == fOwningPerOpListPaths);
    SkASSERT(that->fNumDraws);

    if (fProcessors != that->fProcessors ||
        fViewMatrixIfUsingLocalCoords != that->fViewMatrixIfUsingLocalCoords) {
        return CombineResult::kCannotCombine;
    }

    fDraws.append(std::move(that->fDraws), &fOwningPerOpListPaths->fAllocator);

    SkDEBUGCODE(fNumDraws += that->fNumDraws);
    SkDEBUGCODE(that->fNumDraws = 0);
    return CombineResult::kMerged;
}

void GrCCDrawPathsOp::addToOwningPerOpListPaths(sk_sp<GrCCPerOpListPaths> owningPerOpListPaths) {
    SkASSERT(1 == fNumDraws);
    SkASSERT(!fOwningPerOpListPaths);
    fOwningPerOpListPaths = std::move(owningPerOpListPaths);
    fOwningPerOpListPaths->fDrawOps.addToTail(this);
}

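// Tallies the atlas space and path counts this op will need into the per-flush specs, and checks
// the path cache for masks that are already available (or that should be copied into a literal
// A8 coverage atlas).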
void GrCCDrawPathsOp::accountForOwnPaths(GrCCPathCache* pathCache,
                                         GrOnFlushResourceProvider* onFlushRP,
                                         GrCCPerFlushResourceSpecs* specs) {
    using CreateIfAbsent = GrCCPathCache::CreateIfAbsent;
    using MaskTransform = GrCCPathCache::MaskTransform;
    using CoverageType = GrCCAtlas::CoverageType;

    for (SingleDraw& draw : fDraws) {
        SkPath path;
        draw.fShape.asPath(&path);

        SkASSERT(!draw.fCacheEntry);

        if (pathCache) {
            MaskTransform m(draw.fMatrix, &draw.fCachedMaskShift);
            bool canStashPathMask = draw.fMaskVisibility >= Visibility::kMostlyComplete;
            draw.fCacheEntry =
                    pathCache->find(onFlushRP, draw.fShape, m, CreateIfAbsent(canStashPathMask));
        }

        if (draw.fCacheEntry) {
            if (const GrCCCachedAtlas* cachedAtlas = draw.fCacheEntry->cachedAtlas()) {
                SkASSERT(cachedAtlas->getOnFlushProxy());
                if (CoverageType::kA8_LiteralCoverage == cachedAtlas->coverageType()) {
                    ++specs->fNumCachedPaths;
                } else {
                    // Suggest that this path be copied to a literal coverage atlas, to save
                    // memory. (The client may decline this copy via DoCopiesToA8Coverage::kNo.)
                    int idx = (draw.fShape.style().strokeRec().isFillStyle())
                            ? GrCCPerFlushResourceSpecs::kFillIdx
                            : GrCCPerFlushResourceSpecs::kStrokeIdx;
                    ++specs->fNumCopiedPaths[idx];
                    specs->fCopyPathStats[idx].statPath(path);
                    specs->fCopyAtlasSpecs.accountForSpace(
                            draw.fCacheEntry->width(), draw.fCacheEntry->height());
                    draw.fDoCopyToA8Coverage = true;
                }
                continue;
            }

            if (Visibility::kMostlyComplete == draw.fMaskVisibility &&
                draw.fCacheEntry->hitCount() > 1) {
                int shapeSize = SkTMax(draw.fShapeConservativeIBounds.height(),
                                       draw.fShapeConservativeIBounds.width());
                if (shapeSize <= onFlushRP->caps()->maxRenderTargetSize()) {
                    // We've seen this path before with a compatible matrix, and it's mostly
                    // visible. Just render the whole mask so we can try to cache it.
                    draw.fMaskDevIBounds = draw.fShapeConservativeIBounds;
                    draw.fMaskVisibility = Visibility::kComplete;
                }
            }
        }

        int idx = (draw.fShape.style().strokeRec().isFillStyle())
                ? GrCCPerFlushResourceSpecs::kFillIdx
                : GrCCPerFlushResourceSpecs::kStrokeIdx;
        ++specs->fNumRenderedPaths[idx];
        specs->fRenderedPathStats[idx].statPath(path);
        specs->fRenderedAtlasSpecs.accountForSpace(draw.fMaskDevIBounds.width(),
                                                   draw.fMaskDevIBounds.height());
    }
}

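// Renders this op's paths into the per-flush atlases (or reuses cached atlas masks) and appends
// one draw instance per path, grouped into contiguous ranges by atlas via recordInstance().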
void GrCCDrawPathsOp::setupResources(
        GrCCPathCache* pathCache, GrOnFlushResourceProvider* onFlushRP,
        GrCCPerFlushResources* resources, DoCopiesToA8Coverage doCopies) {
    using DoEvenOddFill = GrCCPathProcessor::DoEvenOddFill;
    SkASSERT(fNumDraws > 0);
    SkASSERT(-1 == fBaseInstance);
    fBaseInstance = resources->nextPathInstanceIdx();

    for (SingleDraw& draw : fDraws) {
        SkPath path;
        draw.fShape.asPath(&path);

        auto doEvenOddFill = DoEvenOddFill(draw.fShape.style().strokeRec().isFillStyle() &&
                                           SkPath::kEvenOdd_FillType == path.getFillType());
        SkASSERT(SkPath::kEvenOdd_FillType == path.getFillType() ||
                 SkPath::kWinding_FillType == path.getFillType());

        if (auto cacheEntry = draw.fCacheEntry.get()) {
            // Does the path already exist in a cached atlas texture?
            if (cacheEntry->cachedAtlas()) {
                SkASSERT(cacheEntry->cachedAtlas()->getOnFlushProxy());
                if (DoCopiesToA8Coverage::kYes == doCopies && draw.fDoCopyToA8Coverage) {
                    resources->upgradeEntryToLiteralCoverageAtlas(pathCache, onFlushRP, cacheEntry,
                                                                  doEvenOddFill);
                    SkASSERT(cacheEntry->cachedAtlas());
                    SkASSERT(GrCCAtlas::CoverageType::kA8_LiteralCoverage
                                     == cacheEntry->cachedAtlas()->coverageType());
                    SkASSERT(cacheEntry->cachedAtlas()->getOnFlushProxy());
                }
                this->recordInstance(cacheEntry->cachedAtlas()->getOnFlushProxy(),
                                     resources->nextPathInstanceIdx());
                // TODO4F: Preserve float colors.
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor.toBytes_RGBA());
                continue;
            }
        }

        // Render the raw path into a coverage count atlas. renderShapeInAtlas() gives us two
        // tight bounding boxes: one in device space, and a second one rotated an additional 45
        // degrees. The path vertex shader uses these two bounding boxes to generate an octagon
        // that circumscribes the path.
        SkRect devBounds, devBounds45;
        SkIRect devIBounds;
        SkIVector devToAtlasOffset;
        if (auto atlas = resources->renderShapeInAtlas(
                    draw.fMaskDevIBounds, draw.fMatrix, draw.fShape, draw.fStrokeDevWidth,
                    &devBounds, &devBounds45, &devIBounds, &devToAtlasOffset)) {
            this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
            // TODO4F: Preserve float colors.
            resources->appendDrawPathInstance().set(devBounds, devBounds45, devToAtlasOffset,
                                                    draw.fColor.toBytes_RGBA(), doEvenOddFill);

            // If we have a spot in the path cache, try to make a note of where this mask is so
            // we can reuse it in the future.
            if (auto cacheEntry = draw.fCacheEntry.get()) {
                SkASSERT(!cacheEntry->cachedAtlas());

                if (Visibility::kComplete != draw.fMaskVisibility || cacheEntry->hitCount() <= 1) {
                    // Don't cache a path mask unless it's completely visible with a hit count > 1.
                    //
                    // NOTE: mostly-visible paths with a hit count > 1 should have been promoted
                    // to fully visible during accountForOwnPaths().
                    continue;
                }

                cacheEntry->setCoverageCountAtlas(onFlushRP, atlas, devToAtlasOffset, devBounds,
                                                  devBounds45, devIBounds, draw.fCachedMaskShift);
            }
        }
    }

    if (!fInstanceRanges.empty()) {
        fInstanceRanges.back().fEndInstanceIdx = resources->nextPathInstanceIdx();
    }
}

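// Starts a new instance range whenever the atlas proxy changes; consecutive instances that share
// an atlas stay in the same range. The final range's end index is filled in by setupResources().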
inline void GrCCDrawPathsOp::recordInstance(GrTextureProxy* atlasProxy, int instanceIdx) {
    if (fInstanceRanges.empty()) {
        fInstanceRanges.push_back({atlasProxy, instanceIdx});
        return;
    }
    if (fInstanceRanges.back().fAtlasProxy != atlasProxy) {
        fInstanceRanges.back().fEndInstanceIdx = instanceIdx;
        fInstanceRanges.push_back({atlasProxy, instanceIdx});
        return;
    }
}

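// Issues one GrCCPathProcessor draw per recorded instance range, binding that range's atlas proxy
// as the primitive processor texture.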
void GrCCDrawPathsOp::onExecute(GrOpFlushState* flushState, const SkRect& chainBounds) {
    SkASSERT(fOwningPerOpListPaths);

    const GrCCPerFlushResources* resources = fOwningPerOpListPaths->fFlushResources.get();
    if (!resources) {
        return;  // Setup failed.
    }

    GrPipeline::InitArgs initArgs;
    initArgs.fProxy = flushState->drawOpArgs().fProxy;
    initArgs.fCaps = &flushState->caps();
    initArgs.fResourceProvider = flushState->resourceProvider();
    initArgs.fDstProxy = flushState->drawOpArgs().fDstProxy;
    auto clip = flushState->detachAppliedClip();
    GrPipeline::FixedDynamicState fixedDynamicState(clip.scissorState().rect());
    GrPipeline pipeline(initArgs, std::move(fProcessors), std::move(clip));

    int baseInstance = fBaseInstance;
    SkASSERT(baseInstance >= 0);  // Make sure setupResources() has been called.

    for (const InstanceRange& range : fInstanceRanges) {
        SkASSERT(range.fEndInstanceIdx > baseInstance);

        GrCCPathProcessor pathProc(range.fAtlasProxy, fViewMatrixIfUsingLocalCoords);
        GrTextureProxy* atlasProxy = range.fAtlasProxy;
        fixedDynamicState.fPrimitiveProcessorTextures = &atlasProxy;
        pathProc.drawPaths(flushState, pipeline, &fixedDynamicState, *resources, baseInstance,
                           range.fEndInstanceIdx, this->bounds());

        baseInstance = range.fEndInstanceIdx;
    }
}