/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCDrawPathsOp.h"

#include "GrContext.h"
#include "GrContextPriv.h"
#include "GrMemoryPool.h"
#include "GrOpFlushState.h"
#include "ccpr/GrCCPathCache.h"
#include "ccpr/GrCCPerFlushResources.h"
#include "ccpr/GrCoverageCountingPathRenderer.h"

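// Returns true if any fragment processor in the paint reads local coordinates. When none does,
// the op does not need to remember the view matrix for local-coord generation (see
// fViewMatrixIfUsingLocalCoords in the constructor below).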
static bool has_coord_transforms(const GrPaint& paint) {
    GrFragmentProcessor::Iter iter(paint);
    while (const GrFragmentProcessor* fp = iter.next()) {
        if (!fp->coordTransforms().empty()) {
            return true;
        }
    }
    return false;
}

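// Returns the area of a rect using a 64-bit multiply, so that large conservative device bounds
// cannot overflow a 32-bit int.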
static int64_t area(const SkIRect& r) {
    return sk_64_mul(r.height(), r.width());
}

std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::Make(
        GrContext* context, const SkIRect& clipIBounds, const SkMatrix& m, const GrShape& shape,
        GrPaint&& paint) {
    SkRect conservativeDevBounds;
    m.mapRect(&conservativeDevBounds, shape.bounds());

    const SkStrokeRec& stroke = shape.style().strokeRec();
    float strokeDevWidth = 0;
    float conservativeInflationRadius = 0;
    if (!stroke.isFillStyle()) {
        strokeDevWidth = GrCoverageCountingPathRenderer::GetStrokeDevWidth(
                m, stroke, &conservativeInflationRadius);
        conservativeDevBounds.outset(conservativeInflationRadius, conservativeInflationRadius);
    }

    std::unique_ptr<GrCCDrawPathsOp> op;
    float conservativeSize = SkTMax(conservativeDevBounds.height(), conservativeDevBounds.width());
    if (conservativeSize > GrCoverageCountingPathRenderer::kPathCropThreshold) {
        // The path is too large. Crop it, or analytic AA can run out of fp32 precision.
        SkPath croppedDevPath;
        shape.asPath(&croppedDevPath);
        croppedDevPath.transform(m, &croppedDevPath);

        SkIRect cropBox = clipIBounds;
        GrShape croppedDevShape;
        if (stroke.isFillStyle()) {
            GrCoverageCountingPathRenderer::CropPath(croppedDevPath, cropBox, &croppedDevPath);
            croppedDevShape = GrShape(croppedDevPath);
            conservativeDevBounds = croppedDevShape.bounds();
        } else {
            int r = SkScalarCeilToInt(conservativeInflationRadius);
            cropBox.outset(r, r);
            GrCoverageCountingPathRenderer::CropPath(croppedDevPath, cropBox, &croppedDevPath);
            SkStrokeRec devStroke = stroke;
            devStroke.setStrokeStyle(strokeDevWidth);
            croppedDevShape = GrShape(croppedDevPath, GrStyle(devStroke, nullptr));
            conservativeDevBounds = croppedDevPath.getBounds();
            conservativeDevBounds.outset(conservativeInflationRadius, conservativeInflationRadius);
        }

        // FIXME: This breaks local coords: http://skbug.com/8003
        return InternalMake(context, clipIBounds, SkMatrix::I(), croppedDevShape, strokeDevWidth,
                            conservativeDevBounds, std::move(paint));
    }

    return InternalMake(context, clipIBounds, m, shape, strokeDevWidth, conservativeDevBounds,
                        std::move(paint));
}

std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::InternalMake(
        GrContext* context, const SkIRect& clipIBounds, const SkMatrix& m, const GrShape& shape,
        float strokeDevWidth, const SkRect& conservativeDevBounds, GrPaint&& paint) {
    // The path itself should have been cropped if larger than kPathCropThreshold. If it had a
    // stroke, that would have further inflated its draw bounds.
    SkASSERT(SkTMax(conservativeDevBounds.height(), conservativeDevBounds.width()) <
             GrCoverageCountingPathRenderer::kPathCropThreshold +
             GrCoverageCountingPathRenderer::kMaxBoundsInflationFromStroke*2 + 1);

    SkIRect shapeConservativeIBounds;
    conservativeDevBounds.roundOut(&shapeConservativeIBounds);

    SkIRect maskDevIBounds;
    Visibility maskVisibility;
    if (clipIBounds.contains(shapeConservativeIBounds)) {
        maskDevIBounds = shapeConservativeIBounds;
        maskVisibility = Visibility::kComplete;
    } else {
        if (!maskDevIBounds.intersect(clipIBounds, shapeConservativeIBounds)) {
            return nullptr;
        }
        int64_t unclippedArea = area(shapeConservativeIBounds);
        int64_t clippedArea = area(maskDevIBounds);
        maskVisibility = (clippedArea >= unclippedArea/2 || unclippedArea < 100*100)
                ? Visibility::kMostlyComplete  // i.e., visible enough to justify rendering the
                                               // whole thing if we think we can cache it.
                : Visibility::kPartial;
    }

    GrOpMemoryPool* pool = context->contextPriv().opMemoryPool();

    return pool->allocate<GrCCDrawPathsOp>(m, shape, strokeDevWidth, shapeConservativeIBounds,
                                           maskDevIBounds, maskVisibility, conservativeDevBounds,
                                           std::move(paint));
}

GrCCDrawPathsOp::GrCCDrawPathsOp(const SkMatrix& m, const GrShape& shape, float strokeDevWidth,
                                 const SkIRect& shapeConservativeIBounds,
                                 const SkIRect& maskDevIBounds, Visibility maskVisibility,
                                 const SkRect& conservativeDevBounds, GrPaint&& paint)
        : GrDrawOp(ClassID())
        , fViewMatrixIfUsingLocalCoords(has_coord_transforms(paint) ? m : SkMatrix::I())
        , fDraws(m, shape, strokeDevWidth, shapeConservativeIBounds, maskDevIBounds, maskVisibility,
                 paint.getColor4f())
        , fProcessors(std::move(paint)) {  // Paint must be moved after fetching its color above.
    SkDEBUGCODE(fBaseInstance = -1);
    // FIXME: intersect with clip bounds to (hopefully) improve batching.
    // (This is nontrivial due to assumptions in generating the octagon cover geometry.)
    this->setBounds(conservativeDevBounds, GrOp::HasAABloat::kYes, GrOp::IsZeroArea::kNo);
}

GrCCDrawPathsOp::~GrCCDrawPathsOp() {
    if (fOwningPerOpListPaths) {
        // Remove the list's dangling pointer to this Op before deleting it.
        fOwningPerOpListPaths->fDrawOps.remove(this);
    }
}

GrCCDrawPathsOp::SingleDraw::SingleDraw(const SkMatrix& m, const GrShape& shape,
                                        float strokeDevWidth,
                                        const SkIRect& shapeConservativeIBounds,
                                        const SkIRect& maskDevIBounds, Visibility maskVisibility,
                                        const SkPMColor4f& color)
        : fMatrix(m)
        , fShape(shape)
        , fStrokeDevWidth(strokeDevWidth)
        , fShapeConservativeIBounds(shapeConservativeIBounds)
        , fMaskDevIBounds(maskDevIBounds)
        , fMaskVisibility(maskVisibility)
        , fColor(color) {
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    if (fShape.hasUnstyledKey()) {
        // On AOSP we round view matrix translates to integer values for cacheable paths. We do
        // this to match HWUI's cache hit ratio, which doesn't consider the matrix when caching
        // paths.
        fMatrix.setTranslateX(SkScalarRoundToScalar(fMatrix.getTranslateX()));
        fMatrix.setTranslateY(SkScalarRoundToScalar(fMatrix.getTranslateY()));
    }
#endif
}

GrDrawOp::RequiresDstTexture GrCCDrawPathsOp::finalize(const GrCaps& caps,
                                                       const GrAppliedClip* clip) {
    SkASSERT(1 == fNumDraws);  // There should only be a single path draw in this Op right now.
    return fDraws.head().finalize(caps, clip, &fProcessors);
}

GrDrawOp::RequiresDstTexture GrCCDrawPathsOp::SingleDraw::finalize(
        const GrCaps& caps, const GrAppliedClip* clip, GrProcessorSet* processors) {
    const GrProcessorSet::Analysis& analysis = processors->finalize(
            fColor, GrProcessorAnalysisCoverage::kSingleChannel, clip, false, caps, &fColor);

    // Lines start looking jagged when they get thinner than 1px. For thin strokes it looks better
    // if we can convert them to hairline (i.e., inflate the stroke width to 1px), and instead
    // reduce the opacity to create the illusion of thinness. This strategy also helps reduce
    // artifacts from coverage dilation when there are self intersections.
    if (analysis.isCompatibleWithCoverageAsAlpha() &&
        !fShape.style().strokeRec().isFillStyle() && fStrokeDevWidth < 1) {
        // Modifying the shape affects its cache key. The draw can't have a cache entry yet or else
        // our next step would invalidate it.
        SkASSERT(!fCacheEntry);
        SkASSERT(SkStrokeRec::kStroke_Style == fShape.style().strokeRec().getStyle());

        SkPath path;
        fShape.asPath(&path);

        // Create a hairline version of our stroke.
        SkStrokeRec hairlineStroke = fShape.style().strokeRec();
        hairlineStroke.setStrokeStyle(0);

        // How transparent does a 1px stroke have to be in order to appear as thin as the real one?
        float coverage = fStrokeDevWidth;
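        // (E.g., a stroke whose device-space width is 0.25px is drawn as a 1px hairline at 25%
        // coverage.)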

        fShape = GrShape(path, GrStyle(hairlineStroke, nullptr));
        fStrokeDevWidth = 1;

        // TODO4F: Preserve float colors
        // fShapeConservativeIBounds already accounted for this possibility of inflating the stroke.
        fColor = fColor * coverage;
    }

    return RequiresDstTexture(analysis.requiresDstTexture());
}

GrOp::CombineResult GrCCDrawPathsOp::onCombineIfPossible(GrOp* op, const GrCaps&) {
    GrCCDrawPathsOp* that = op->cast<GrCCDrawPathsOp>();
    SkASSERT(fOwningPerOpListPaths);
    SkASSERT(fNumDraws);
    SkASSERT(!that->fOwningPerOpListPaths || that->fOwningPerOpListPaths == fOwningPerOpListPaths);
    SkASSERT(that->fNumDraws);

    if (fProcessors != that->fProcessors ||
        fViewMatrixIfUsingLocalCoords != that->fViewMatrixIfUsingLocalCoords) {
        return CombineResult::kCannotCombine;
    }

    fDraws.append(std::move(that->fDraws), &fOwningPerOpListPaths->fAllocator);

    SkDEBUGCODE(fNumDraws += that->fNumDraws);
    SkDEBUGCODE(that->fNumDraws = 0);
    return CombineResult::kMerged;
}

void GrCCDrawPathsOp::addToOwningPerOpListPaths(sk_sp<GrCCPerOpListPaths> owningPerOpListPaths) {
    SkASSERT(1 == fNumDraws);
    SkASSERT(!fOwningPerOpListPaths);
    fOwningPerOpListPaths = std::move(owningPerOpListPaths);
    fOwningPerOpListPaths->fDrawOps.addToTail(this);
}

void GrCCDrawPathsOp::accountForOwnPaths(GrCCPathCache* pathCache,
                                         GrOnFlushResourceProvider* onFlushRP,
                                         GrCCPerFlushResourceSpecs* specs) {
    for (SingleDraw& draw : fDraws) {
        draw.accountForOwnPath(pathCache, onFlushRP, specs);
    }
}

void GrCCDrawPathsOp::SingleDraw::accountForOwnPath(
        GrCCPathCache* pathCache, GrOnFlushResourceProvider* onFlushRP,
        GrCCPerFlushResourceSpecs* specs) {
    using CreateIfAbsent = GrCCPathCache::CreateIfAbsent;
    using MaskTransform = GrCCPathCache::MaskTransform;
    using CoverageType = GrCCAtlas::CoverageType;

    SkPath path;
    fShape.asPath(&path);

    SkASSERT(!fCacheEntry);

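    // Check for a cached version of this path's mask. Only allow a new cache entry to be created
    // if the mask is at least mostly visible; a mask we would only ever draw a fraction of is not
    // worth the cache space.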
    if (pathCache) {
        MaskTransform m(fMatrix, &fCachedMaskShift);
        bool canStashPathMask = fMaskVisibility >= Visibility::kMostlyComplete;
        fCacheEntry = pathCache->find(onFlushRP, fShape, m, CreateIfAbsent(canStashPathMask));
    }

    if (fCacheEntry) {
        if (const GrCCCachedAtlas* cachedAtlas = fCacheEntry->cachedAtlas()) {
            SkASSERT(cachedAtlas->getOnFlushProxy());
            if (CoverageType::kA8_LiteralCoverage == cachedAtlas->coverageType()) {
                ++specs->fNumCachedPaths;
            } else {
                // Suggest that this path be copied to a literal coverage atlas, to save memory.
                // (The client may decline this copy via DoCopiesToA8Coverage::kNo.)
                int idx = (fShape.style().strokeRec().isFillStyle())
                        ? GrCCPerFlushResourceSpecs::kFillIdx
                        : GrCCPerFlushResourceSpecs::kStrokeIdx;
                ++specs->fNumCopiedPaths[idx];
                specs->fCopyPathStats[idx].statPath(path);
                specs->fCopyAtlasSpecs.accountForSpace(fCacheEntry->width(), fCacheEntry->height());
                fDoCopyToA8Coverage = true;
            }
            return;
        }

        if (Visibility::kMostlyComplete == fMaskVisibility && fCacheEntry->hitCount() > 1) {
            int shapeSize = SkTMax(fShapeConservativeIBounds.height(),
                                   fShapeConservativeIBounds.width());
            if (shapeSize <= onFlushRP->caps()->maxRenderTargetSize()) {
                // We've seen this path before with a compatible matrix, and it's mostly
                // visible. Just render the whole mask so we can try to cache it.
                fMaskDevIBounds = fShapeConservativeIBounds;
                fMaskVisibility = Visibility::kComplete;
            }
        }
    }

    int idx = (fShape.style().strokeRec().isFillStyle())
            ? GrCCPerFlushResourceSpecs::kFillIdx
            : GrCCPerFlushResourceSpecs::kStrokeIdx;
    ++specs->fNumRenderedPaths[idx];
    specs->fRenderedPathStats[idx].statPath(path);
    specs->fRenderedAtlasSpecs.accountForSpace(fMaskDevIBounds.width(), fMaskDevIBounds.height());
}

void GrCCDrawPathsOp::setupResources(
        GrCCPathCache* pathCache, GrOnFlushResourceProvider* onFlushRP,
        GrCCPerFlushResources* resources, DoCopiesToA8Coverage doCopies) {
    SkASSERT(fNumDraws > 0);
    SkASSERT(-1 == fBaseInstance);
    fBaseInstance = resources->nextPathInstanceIdx();

    for (SingleDraw& draw : fDraws) {
        draw.setupResources(pathCache, onFlushRP, resources, doCopies, this);
    }

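    // Close out the final instance range. recordInstance() only closes a range when the atlas
    // proxy changes, so the last range is still open at this point.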
    if (!fInstanceRanges.empty()) {
        fInstanceRanges.back().fEndInstanceIdx = resources->nextPathInstanceIdx();
    }
}

void GrCCDrawPathsOp::SingleDraw::setupResources(
        GrCCPathCache* pathCache, GrOnFlushResourceProvider* onFlushRP,
        GrCCPerFlushResources* resources, DoCopiesToA8Coverage doCopies, GrCCDrawPathsOp* op) {
    using DoEvenOddFill = GrCCPathProcessor::DoEvenOddFill;

    SkPath path;
    fShape.asPath(&path);

    auto doEvenOddFill = DoEvenOddFill(fShape.style().strokeRec().isFillStyle() &&
                                       SkPath::kEvenOdd_FillType == path.getFillType());
    SkASSERT(SkPath::kEvenOdd_FillType == path.getFillType() ||
             SkPath::kWinding_FillType == path.getFillType());

    if (fCacheEntry) {
        // Does the path already exist in a cached atlas texture?
        if (fCacheEntry->cachedAtlas()) {
            SkASSERT(fCacheEntry->cachedAtlas()->getOnFlushProxy());
            if (DoCopiesToA8Coverage::kYes == doCopies && fDoCopyToA8Coverage) {
                resources->upgradeEntryToLiteralCoverageAtlas(pathCache, onFlushRP,
                                                              fCacheEntry.get(), doEvenOddFill);
                SkASSERT(fCacheEntry->cachedAtlas());
                SkASSERT(GrCCAtlas::CoverageType::kA8_LiteralCoverage
                                 == fCacheEntry->cachedAtlas()->coverageType());
                SkASSERT(fCacheEntry->cachedAtlas()->getOnFlushProxy());
            }
            op->recordInstance(fCacheEntry->cachedAtlas()->getOnFlushProxy(),
                               resources->nextPathInstanceIdx());
            // TODO4F: Preserve float colors
            resources->appendDrawPathInstance().set(*fCacheEntry, fCachedMaskShift,
                                                    fColor.toBytes_RGBA());
            return;
        }
    }

    // Render the raw path into a coverage count atlas. renderShapeInAtlas() gives us two tight
    // bounding boxes: One in device space, as well as a second one rotated an additional 45
    // degrees. The path vertex shader uses these two bounding boxes to generate an octagon that
    // circumscribes the path.
    SkRect devBounds, devBounds45;
    SkIRect devIBounds;
    SkIVector devToAtlasOffset;
    if (auto atlas = resources->renderShapeInAtlas(
                fMaskDevIBounds, fMatrix, fShape, fStrokeDevWidth, &devBounds, &devBounds45,
                &devIBounds, &devToAtlasOffset)) {
        op->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
        // TODO4F: Preserve float colors
        resources->appendDrawPathInstance().set(devBounds, devBounds45, devToAtlasOffset,
                                                fColor.toBytes_RGBA(), doEvenOddFill);

        // If we have a spot in the path cache, try to make a note of where this mask is so we
        // can reuse it in the future.
        if (fCacheEntry) {
            SkASSERT(!fCacheEntry->cachedAtlas());

            if (Visibility::kComplete != fMaskVisibility || fCacheEntry->hitCount() <= 1) {
                // Don't cache a path mask unless it's completely visible with a hit count > 1.
                //
                // NOTE: mostly-visible paths with a hit count > 1 should have been promoted to
                // fully visible during accountForOwnPaths().
                return;
            }

            fCacheEntry->setCoverageCountAtlas(onFlushRP, atlas, devToAtlasOffset, devBounds,
                                               devBounds45, devIBounds, fCachedMaskShift);
        }
    }
}

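// Tracks the instance buffer as contiguous ranges, one per atlas proxy. "instanceIdx" is the
// index of the next instance that will be appended; a new range is opened whenever the atlas
// proxy changes from the previous draw's.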
inline void GrCCDrawPathsOp::recordInstance(GrTextureProxy* atlasProxy, int instanceIdx) {
    if (fInstanceRanges.empty()) {
        fInstanceRanges.push_back({atlasProxy, instanceIdx});
        return;
    }
    if (fInstanceRanges.back().fAtlasProxy != atlasProxy) {
        fInstanceRanges.back().fEndInstanceIdx = instanceIdx;
        fInstanceRanges.push_back({atlasProxy, instanceIdx});
        return;
    }
}

void GrCCDrawPathsOp::onExecute(GrOpFlushState* flushState, const SkRect& chainBounds) {
    SkASSERT(fOwningPerOpListPaths);

    const GrCCPerFlushResources* resources = fOwningPerOpListPaths->fFlushResources.get();
    if (!resources) {
        return;  // Setup failed.
    }

    GrPipeline::InitArgs initArgs;
    initArgs.fProxy = flushState->drawOpArgs().fProxy;
    initArgs.fCaps = &flushState->caps();
    initArgs.fResourceProvider = flushState->resourceProvider();
    initArgs.fDstProxy = flushState->drawOpArgs().fDstProxy;
    auto clip = flushState->detachAppliedClip();
    GrPipeline::FixedDynamicState fixedDynamicState(clip.scissorState().rect());
    GrPipeline pipeline(initArgs, std::move(fProcessors), std::move(clip));

    int baseInstance = fBaseInstance;
    SkASSERT(baseInstance >= 0);  // Make sure setupResources() has been called.

    for (const InstanceRange& range : fInstanceRanges) {
        SkASSERT(range.fEndInstanceIdx > baseInstance);

        GrCCPathProcessor pathProc(range.fAtlasProxy, fViewMatrixIfUsingLocalCoords);
        GrTextureProxy* atlasProxy = range.fAtlasProxy;
        fixedDynamicState.fPrimitiveProcessorTextures = &atlasProxy;
        pathProc.drawPaths(flushState, pipeline, &fixedDynamicState, *resources, baseInstance,
                           range.fEndInstanceIdx, this->bounds());

        baseInstance = range.fEndInstanceIdx;
    }
}