/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCDrawPathsOp.h"
#include "GrContext.h"
#include "GrContextPriv.h"
#include "GrMemoryPool.h"
#include "GrOpFlushState.h"
#include "ccpr/GrCCPathCache.h"
#include "ccpr/GrCCPerFlushResources.h"
#include "ccpr/GrCoverageCountingPathRenderer.h"

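// Scans the paint's fragment processor tree for coord transforms. If any FP needs local
// coords, the op must keep the real view matrix around (see fViewMatrixIfUsingLocalCoords
// in the constructor below); otherwise it can pretend the matrix is identity and merge
// with more ops in onCombineIfPossible().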
static bool has_coord_transforms(const GrPaint& paint) {
    GrFragmentProcessor::Iter iter(paint);
    while (const GrFragmentProcessor* fp = iter.next()) {
        if (!fp->coordTransforms().empty()) {
            return true;
        }
    }
    return false;
}

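// Returns the rect's area as a 64-bit value. sk_64_mul widens to 64 bits before
// multiplying, so the width * height product cannot overflow for large device-space rects.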
static int64_t area(const SkIRect& r) {
    return sk_64_mul(r.height(), r.width());
}

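// Public factory. (Assumed call site, noted here for orientation only: the coverage
// counting path renderer's onDrawPath() is expected to build this op with the clip's
// pixel bounds, the draw's view matrix, and the GrShape to render.)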
std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::Make(
        GrContext* context, const SkIRect& clipIBounds, const SkMatrix& m, const GrShape& shape,
        GrPaint&& paint) {
    SkRect conservativeDevBounds;
    m.mapRect(&conservativeDevBounds, shape.bounds());

    const SkStrokeRec& stroke = shape.style().strokeRec();
    float strokeDevWidth = 0;
    float conservativeInflationRadius = 0;
    if (!stroke.isFillStyle()) {
        strokeDevWidth = GrCoverageCountingPathRenderer::GetStrokeDevWidth(
                m, stroke, &conservativeInflationRadius);
        conservativeDevBounds.outset(conservativeInflationRadius, conservativeInflationRadius);
    }

    std::unique_ptr<GrCCDrawPathsOp> op;
    float conservativeSize = SkTMax(conservativeDevBounds.height(), conservativeDevBounds.width());
    if (conservativeSize > GrCoverageCountingPathRenderer::kPathCropThreshold) {
        // The path is too large; crop it, or analytic AA can run out of fp32 precision.
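        // (kPathCropThreshold is a constant on GrCoverageCountingPathRenderer -- assumed
        // here to be on the order of 2^16 device-space pixels; the authoritative value
        // lives in that class's header.)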
        SkPath croppedDevPath;
        shape.asPath(&croppedDevPath);
        croppedDevPath.transform(m, &croppedDevPath);

        SkIRect cropBox = clipIBounds;
        GrShape croppedDevShape;
        if (stroke.isFillStyle()) {
            GrCoverageCountingPathRenderer::CropPath(croppedDevPath, cropBox, &croppedDevPath);
            croppedDevShape = GrShape(croppedDevPath);
            conservativeDevBounds = croppedDevShape.bounds();
        } else {
            int r = SkScalarCeilToInt(conservativeInflationRadius);
            cropBox.outset(r, r);
            GrCoverageCountingPathRenderer::CropPath(croppedDevPath, cropBox, &croppedDevPath);
            SkStrokeRec devStroke = stroke;
            devStroke.setStrokeStyle(strokeDevWidth);
            croppedDevShape = GrShape(croppedDevPath, GrStyle(devStroke, nullptr));
            conservativeDevBounds = croppedDevPath.getBounds();
            conservativeDevBounds.outset(conservativeInflationRadius, conservativeInflationRadius);
        }

        // FIXME: This breaks local coords: http://skbug.com/8003
        return InternalMake(context, clipIBounds, SkMatrix::I(), croppedDevShape, strokeDevWidth,
                            conservativeDevBounds, std::move(paint));
    }

    return InternalMake(context, clipIBounds, m, shape, strokeDevWidth, conservativeDevBounds,
                        std::move(paint));
}

std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::InternalMake(
        GrContext* context, const SkIRect& clipIBounds, const SkMatrix& m, const GrShape& shape,
        float strokeDevWidth, const SkRect& conservativeDevBounds, GrPaint&& paint) {
    // The path itself should have been cropped if larger than kPathCropThreshold. If it had a
    // stroke, that would have further inflated its draw bounds.
    SkASSERT(SkTMax(conservativeDevBounds.height(), conservativeDevBounds.width()) <
             GrCoverageCountingPathRenderer::kPathCropThreshold +
             GrCoverageCountingPathRenderer::kMaxBoundsInflationFromStroke*2 + 1);

    SkIRect shapeConservativeIBounds;
    conservativeDevBounds.roundOut(&shapeConservativeIBounds);

    SkIRect maskDevIBounds;
    Visibility maskVisibility;
    if (clipIBounds.contains(shapeConservativeIBounds)) {
        maskDevIBounds = shapeConservativeIBounds;
        maskVisibility = Visibility::kComplete;
    } else {
        if (!maskDevIBounds.intersect(clipIBounds, shapeConservativeIBounds)) {
            return nullptr;
        }
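        // Heuristic sketch: a 200x200 path (unclipped area 40,000) whose clipped mask still
        // covers 25,000 px counts as "mostly complete" since 25,000 >= 40,000/2, and any path
        // smaller than 100x100 is treated the same way; anything less visible stays kPartial.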
        int64_t unclippedArea = area(shapeConservativeIBounds);
        int64_t clippedArea = area(maskDevIBounds);
        maskVisibility = (clippedArea >= unclippedArea/2 || unclippedArea < 100*100)
                ? Visibility::kMostlyComplete  // i.e., visible enough to justify rendering the
                                               // whole thing if we think we can cache it.
                : Visibility::kPartial;
    }

    GrOpMemoryPool* pool = context->contextPriv().opMemoryPool();

    return pool->allocate<GrCCDrawPathsOp>(m, shape, strokeDevWidth, shapeConservativeIBounds,
                                           maskDevIBounds, maskVisibility, conservativeDevBounds,
                                           std::move(paint));
}

GrCCDrawPathsOp::GrCCDrawPathsOp(const SkMatrix& m, const GrShape& shape, float strokeDevWidth,
                                 const SkIRect& shapeConservativeIBounds,
                                 const SkIRect& maskDevIBounds, Visibility maskVisibility,
                                 const SkRect& conservativeDevBounds, GrPaint&& paint)
        : GrDrawOp(ClassID())
        , fViewMatrixIfUsingLocalCoords(has_coord_transforms(paint) ? m : SkMatrix::I())
        , fDraws(m, shape, strokeDevWidth, shapeConservativeIBounds, maskDevIBounds, maskVisibility,
                 paint.getColor4f())
        , fProcessors(std::move(paint)) {  // Paint must be moved after fetching its color above.
    SkDEBUGCODE(fBaseInstance = -1);
    // FIXME: intersect with clip bounds to (hopefully) improve batching.
    // (This is nontrivial due to assumptions in generating the octagon cover geometry.)
    this->setBounds(conservativeDevBounds, GrOp::HasAABloat::kYes, GrOp::IsZeroArea::kNo);
}

GrCCDrawPathsOp::~GrCCDrawPathsOp() {
    if (fOwningPerOpListPaths) {
        // Remove the list's dangling pointer to this Op before deleting it.
        fOwningPerOpListPaths->fDrawOps.remove(this);
    }
}

GrCCDrawPathsOp::SingleDraw::SingleDraw(const SkMatrix& m, const GrShape& shape,
                                        float strokeDevWidth,
                                        const SkIRect& shapeConservativeIBounds,
                                        const SkIRect& maskDevIBounds, Visibility maskVisibility,
                                        const SkPMColor4f& color)
        : fMatrix(m)
        , fShape(shape)
        , fStrokeDevWidth(strokeDevWidth)
        , fShapeConservativeIBounds(shapeConservativeIBounds)
        , fMaskDevIBounds(maskDevIBounds)
        , fMaskVisibility(maskVisibility)
        , fColor(color) {
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    if (fShape.hasUnstyledKey()) {
        // On AOSP we round view matrix translates to integer values for cacheable paths. We do
        // this to match HWUI's cache hit ratio, which doesn't consider the matrix when caching
        // paths.
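        // e.g., a translate of (10.3, -42.8) snaps to (10, -43). Scale and skew terms are left
        // untouched, so only integer-pixel alignment is affected.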
        fMatrix.setTranslateX(SkScalarRoundToScalar(fMatrix.getTranslateX()));
        fMatrix.setTranslateY(SkScalarRoundToScalar(fMatrix.getTranslateY()));
    }
#endif
}

GrCCDrawPathsOp::SingleDraw::~SingleDraw() {
    if (fCacheEntry) {
        // All currFlushAtlas references must be reset back to null before the flush is finished.
        fCacheEntry->setCurrFlushAtlas(nullptr);
    }
}

GrDrawOp::RequiresDstTexture GrCCDrawPathsOp::finalize(const GrCaps& caps,
                                                       const GrAppliedClip* clip) {
    SkASSERT(1 == fNumDraws);  // There should only be one single path draw in this Op right now.
    SingleDraw* draw = &fDraws.head();

    const GrProcessorSet::Analysis& analysis = fProcessors.finalize(
            draw->fColor, GrProcessorAnalysisCoverage::kSingleChannel, clip, false, caps,
            &draw->fColor);

    // Lines start looking jagged when they get thinner than 1px. For thin strokes it looks better
    // if we can convert them to hairline (i.e., inflate the stroke width to 1px), and instead
    // reduce the opacity to create the illusion of thinness. This strategy also helps reduce
    // artifacts from coverage dilation when there are self intersections.
    if (analysis.isCompatibleWithCoverageAsAlpha() &&
        !draw->fShape.style().strokeRec().isFillStyle() && draw->fStrokeDevWidth < 1) {
        // Modifying the shape affects its cache key. The draw can't have a cache entry yet or else
        // our next step would invalidate it.
        SkASSERT(!draw->fCacheEntry);
        SkASSERT(SkStrokeRec::kStroke_Style == draw->fShape.style().strokeRec().getStyle());

        SkPath path;
        draw->fShape.asPath(&path);

        // Create a hairline version of our stroke.
        SkStrokeRec hairlineStroke = draw->fShape.style().strokeRec();
        hairlineStroke.setStrokeStyle(0);

        // How transparent does a 1px stroke have to be in order to appear as thin as the real one?
        GrColor coverageAsAlpha = GrColorPackA4(SkScalarFloorToInt(draw->fStrokeDevWidth * 255));
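        // e.g., a 0.25px-wide stroke becomes a 1px hairline drawn with alpha scaled by
        // floor(0.25 * 255) = 63, which reads as roughly quarter coverage.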

        draw->fShape = GrShape(path, GrStyle(hairlineStroke, nullptr));
        draw->fStrokeDevWidth = 1;

        // TODO4F: Preserve float colors.
        // fShapeConservativeIBounds already accounted for the possibility of inflating the stroke.
        draw->fColor = SkPMColor4f::FromBytes_RGBA(
                GrColorMul(draw->fColor.toBytes_RGBA(), coverageAsAlpha));
    }

    return RequiresDstTexture(analysis.requiresDstTexture());
}

GrOp::CombineResult GrCCDrawPathsOp::onCombineIfPossible(GrOp* op, const GrCaps&) {
    GrCCDrawPathsOp* that = op->cast<GrCCDrawPathsOp>();
    SkASSERT(fOwningPerOpListPaths);
    SkASSERT(fNumDraws);
    SkASSERT(!that->fOwningPerOpListPaths || that->fOwningPerOpListPaths == fOwningPerOpListPaths);
    SkASSERT(that->fNumDraws);

    if (fProcessors != that->fProcessors ||
        fViewMatrixIfUsingLocalCoords != that->fViewMatrixIfUsingLocalCoords) {
        return CombineResult::kCannotCombine;
    }

    fDraws.append(std::move(that->fDraws), &fOwningPerOpListPaths->fAllocator);

    SkDEBUGCODE(fNumDraws += that->fNumDraws);
    SkDEBUGCODE(that->fNumDraws = 0);
    return CombineResult::kMerged;
}

void GrCCDrawPathsOp::addToOwningPerOpListPaths(sk_sp<GrCCPerOpListPaths> owningPerOpListPaths) {
    SkASSERT(1 == fNumDraws);
    SkASSERT(!fOwningPerOpListPaths);
    fOwningPerOpListPaths = std::move(owningPerOpListPaths);
    fOwningPerOpListPaths->fDrawOps.addToTail(this);
}

void GrCCDrawPathsOp::accountForOwnPaths(GrCCPathCache* pathCache,
                                         GrOnFlushResourceProvider* onFlushRP,
                                         const GrUniqueKey& stashedAtlasKey,
                                         GrCCPerFlushResourceSpecs* specs) {
    using CreateIfAbsent = GrCCPathCache::CreateIfAbsent;
    using MaskTransform = GrCCPathCache::MaskTransform;

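    // Each draw lands in one of three buckets below: it already lives in a cached atlas (just
    // take a proxy ref), it lives in the atlas stashed from last flush (plan a copy into a
    // permanent 8-bit atlas), or it must be rendered from scratch this flush.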
    for (SingleDraw& draw : fDraws) {
        SkPath path;
        draw.fShape.asPath(&path);

        SkASSERT(!draw.fCacheEntry);

        if (pathCache) {
            MaskTransform m(draw.fMatrix, &draw.fCachedMaskShift);
            bool canStashPathMask = draw.fMaskVisibility >= Visibility::kMostlyComplete;
            draw.fCacheEntry = pathCache->find(draw.fShape, m, CreateIfAbsent(canStashPathMask));
        }

        if (auto cacheEntry = draw.fCacheEntry.get()) {
            SkASSERT(!cacheEntry->currFlushAtlas());  // Shouldn't be set until setupResources().

            if (cacheEntry->atlasKey().isValid()) {
                // Does the path already exist in a cached atlas?
                if (cacheEntry->hasCachedAtlas() &&
                    (draw.fCachedAtlasProxy = onFlushRP->findOrCreateProxyByUniqueKey(
                             cacheEntry->atlasKey(), GrCCAtlas::kTextureOrigin))) {
                    ++specs->fNumCachedPaths;
                    continue;
                }

                // Does the path exist in the atlas that we stashed away from last flush? If so we
                // can copy it into a new 8-bit atlas and keep it in the resource cache.
                if (stashedAtlasKey.isValid() && stashedAtlasKey == cacheEntry->atlasKey()) {
                    SkASSERT(!cacheEntry->hasCachedAtlas());
                    int idx = (draw.fShape.style().strokeRec().isFillStyle())
                            ? GrCCPerFlushResourceSpecs::kFillIdx
                            : GrCCPerFlushResourceSpecs::kStrokeIdx;
                    ++specs->fNumCopiedPaths[idx];
                    specs->fCopyPathStats[idx].statPath(path);
                    specs->fCopyAtlasSpecs.accountForSpace(cacheEntry->width(),
                                                           cacheEntry->height());
                    continue;
                }

                // Whatever atlas the path used to reside in, it no longer exists.
                cacheEntry->resetAtlasKeyAndInfo();
            }

            if (Visibility::kMostlyComplete == draw.fMaskVisibility &&
                cacheEntry->hitCount() > 1) {
                int shapeSize = SkTMax(draw.fShapeConservativeIBounds.height(),
                                       draw.fShapeConservativeIBounds.width());
                if (shapeSize <= onFlushRP->caps()->maxRenderTargetSize()) {
                    // We've seen this path before with a compatible matrix, and it's mostly
                    // visible. Just render the whole mask so we can try to cache it.
                    draw.fMaskDevIBounds = draw.fShapeConservativeIBounds;
                    draw.fMaskVisibility = Visibility::kComplete;
                }
            }
        }

        int idx = (draw.fShape.style().strokeRec().isFillStyle())
                ? GrCCPerFlushResourceSpecs::kFillIdx
                : GrCCPerFlushResourceSpecs::kStrokeIdx;
        ++specs->fNumRenderedPaths[idx];
        specs->fRenderedPathStats[idx].statPath(path);
        specs->fRenderedAtlasSpecs.accountForSpace(draw.fMaskDevIBounds.width(),
                                                   draw.fMaskDevIBounds.height());
    }
}

void GrCCDrawPathsOp::setupResources(GrOnFlushResourceProvider* onFlushRP,
                                     GrCCPerFlushResources* resources, DoCopiesToCache doCopies) {
    using DoEvenOddFill = GrCCPathProcessor::DoEvenOddFill;
    SkASSERT(fNumDraws > 0);
    SkASSERT(-1 == fBaseInstance);
    fBaseInstance = resources->nextPathInstanceIdx();
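    // Path instances are appended into one contiguous buffer; recordInstance() tracks
    // [begin, end) ranges per atlas proxy so onExecute() can issue one draw per atlas.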

    for (SingleDraw& draw : fDraws) {
        SkPath path;
        draw.fShape.asPath(&path);

        auto doEvenOddFill = DoEvenOddFill(draw.fShape.style().strokeRec().isFillStyle() &&
                                           SkPath::kEvenOdd_FillType == path.getFillType());
        SkASSERT(SkPath::kEvenOdd_FillType == path.getFillType() ||
                 SkPath::kWinding_FillType == path.getFillType());

        if (auto cacheEntry = draw.fCacheEntry.get()) {
            // Does the path already exist in a cached atlas texture?
            if (auto proxy = draw.fCachedAtlasProxy.get()) {
                SkASSERT(!cacheEntry->currFlushAtlas());
                this->recordInstance(proxy, resources->nextPathInstanceIdx());
                // TODO4F: Preserve float colors.
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor.toBytes_RGBA());
                continue;
            }

            // Have we already encountered this path during the flush? (i.e., was the same SkPath
            // drawn more than once during the same flush, with a compatible matrix?)
            if (auto atlas = cacheEntry->currFlushAtlas()) {
                this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
                // TODO4F: Preserve float colors.
                resources->appendDrawPathInstance().set(
                        *cacheEntry, draw.fCachedMaskShift, draw.fColor.toBytes_RGBA(),
                        cacheEntry->hasCachedAtlas() ? DoEvenOddFill::kNo : doEvenOddFill);
                continue;
            }

            // If the cache entry still has a valid atlas key at this point, it means the path
            // exists in the atlas that we stashed away from last flush. Copy it into a permanent
            // 8-bit atlas in the resource cache.
            if (DoCopiesToCache::kYes == doCopies && cacheEntry->atlasKey().isValid()) {
                SkIVector newOffset;
                GrCCAtlas* atlas =
                        resources->copyPathToCachedAtlas(*cacheEntry, doEvenOddFill, &newOffset);
                cacheEntry->updateToCachedAtlas(
                        atlas->getOrAssignUniqueKey(onFlushRP), newOffset,
                        atlas->refOrMakeCachedAtlasInfo(onFlushRP->contextUniqueID()));
                this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
                // TODO4F: Preserve float colors.
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor.toBytes_RGBA());
                // Remember this atlas in case we encounter the path again during the same flush.
                cacheEntry->setCurrFlushAtlas(atlas);
                continue;
            }
        }

        // Render the raw path into a coverage count atlas. renderShapeInAtlas() gives us two
        // tight bounding boxes: one in device space, as well as a second one rotated an
        // additional 45 degrees. The path vertex shader uses these two bounding boxes to
        // generate an octagon that circumscribes the path.
        SkASSERT(!draw.fCachedAtlasProxy);
        SkRect devBounds, devBounds45;
        SkIRect devIBounds;
        SkIVector devToAtlasOffset;
        if (auto atlas = resources->renderShapeInAtlas(
                    draw.fMaskDevIBounds, draw.fMatrix, draw.fShape, draw.fStrokeDevWidth,
                    &devBounds, &devBounds45, &devIBounds, &devToAtlasOffset)) {
            this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
            // TODO4F: Preserve float colors.
            resources->appendDrawPathInstance().set(devBounds, devBounds45, devToAtlasOffset,
                                                    draw.fColor.toBytes_RGBA(), doEvenOddFill);

            // If we have a spot in the path cache, try to make a note of where this mask is so we
            // can reuse it in the future.
            if (auto cacheEntry = draw.fCacheEntry.get()) {
                SkASSERT(!cacheEntry->hasCachedAtlas());

                if (Visibility::kComplete != draw.fMaskVisibility || cacheEntry->hitCount() <= 1) {
                    // Don't cache a path mask unless it's completely visible with a hit count > 1.
                    //
                    // NOTE: mostly-visible paths with a hit count > 1 should have been promoted to
                    // fully visible during accountForOwnPaths().
                    continue;
                }

                if (resources->nextAtlasToStash() != atlas) {
                    // This mask does not belong to the atlas that will be stashed for next flush.
                    continue;
                }

                const GrUniqueKey& atlasKey =
                        resources->nextAtlasToStash()->getOrAssignUniqueKey(onFlushRP);
                cacheEntry->initAsStashedAtlas(atlasKey, devToAtlasOffset, devBounds, devBounds45,
                                               devIBounds, draw.fCachedMaskShift);
                // Remember this atlas in case we encounter the path again during the same flush.
                cacheEntry->setCurrFlushAtlas(atlas);
            }
            continue;
        }
    }

    if (!fInstanceRanges.empty()) {
        fInstanceRanges.back().fEndInstanceIdx = resources->nextPathInstanceIdx();
    }
}

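// Opens a new instance range for 'atlasProxy', closing the previous range if the proxy
// changed. Note the final range is deliberately left open-ended here; setupResources()
// patches its fEndInstanceIdx after the loop over fDraws completes.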
inline void GrCCDrawPathsOp::recordInstance(GrTextureProxy* atlasProxy, int instanceIdx) {
    if (fInstanceRanges.empty()) {
        fInstanceRanges.push_back({atlasProxy, instanceIdx});
        return;
    }
    if (fInstanceRanges.back().fAtlasProxy != atlasProxy) {
        fInstanceRanges.back().fEndInstanceIdx = instanceIdx;
        fInstanceRanges.push_back({atlasProxy, instanceIdx});
        return;
    }
}

void GrCCDrawPathsOp::onExecute(GrOpFlushState* flushState) {
    SkASSERT(fOwningPerOpListPaths);

    const GrCCPerFlushResources* resources = fOwningPerOpListPaths->fFlushResources.get();
    if (!resources) {
        return;  // Setup failed.
    }

    GrPipeline::InitArgs initArgs;
    initArgs.fProxy = flushState->drawOpArgs().fProxy;
    initArgs.fCaps = &flushState->caps();
    initArgs.fResourceProvider = flushState->resourceProvider();
    initArgs.fDstProxy = flushState->drawOpArgs().fDstProxy;
    auto clip = flushState->detachAppliedClip();
    GrPipeline::FixedDynamicState fixedDynamicState(clip.scissorState().rect());
    GrPipeline pipeline(initArgs, std::move(fProcessors), std::move(clip));

    int baseInstance = fBaseInstance;
    SkASSERT(baseInstance >= 0);  // Make sure setupResources() has been called.

    for (const InstanceRange& range : fInstanceRanges) {
        SkASSERT(range.fEndInstanceIdx > baseInstance);

        GrCCPathProcessor pathProc(range.fAtlasProxy, fViewMatrixIfUsingLocalCoords);
        GrTextureProxy* atlasProxy = range.fAtlasProxy;
        fixedDynamicState.fPrimitiveProcessorTextures = &atlasProxy;
        pathProc.drawPaths(flushState, pipeline, &fixedDynamicState, *resources, baseInstance,
                           range.fEndInstanceIdx, this->bounds());

        baseInstance = range.fEndInstanceIdx;
    }
}