Chris Dalton | 5ba36ba | 2018-05-09 01:08:38 -0600 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2018 Google Inc. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license that can be |
| 5 | * found in the LICENSE file. |
| 6 | */ |
| 7 | |
| 8 | #ifndef GrCCDrawPathsOp_DEFINED |
| 9 | #define GrCCDrawPathsOp_DEFINED |
| 10 | |
Ben Wagner | 729a23f | 2019-05-17 16:29:34 -0400 | [diff] [blame] | 11 | #include "src/core/SkTInternalLList.h" |
Mike Klein | c0bd9f9 | 2019-04-23 12:05:21 -0500 | [diff] [blame] | 12 | #include "src/gpu/ccpr/GrCCPathCache.h" |
| 13 | #include "src/gpu/ccpr/GrCCSTLList.h" |
Michael Ludwig | 663afe5 | 2019-06-03 16:46:19 -0400 | [diff] [blame] | 14 | #include "src/gpu/geometry/GrShape.h" |
Mike Klein | c0bd9f9 | 2019-04-23 12:05:21 -0500 | [diff] [blame] | 15 | #include "src/gpu/ops/GrDrawOp.h" |
Chris Dalton | 5ba36ba | 2018-05-09 01:08:38 -0600 | [diff] [blame] | 16 | |
Robert Phillips | 6f0e02f | 2019-02-13 11:02:28 -0500 | [diff] [blame] | 17 | class GrCCAtlas; |
| 18 | class GrCCPerFlushResources; |
Chris Dalton | 42c2115 | 2018-06-13 15:28:19 -0600 | [diff] [blame] | 19 | struct GrCCPerFlushResourceSpecs; |
Greg Daniel | f41b2bd | 2019-08-22 16:19:24 -0400 | [diff] [blame] | 20 | struct GrCCPerOpsTaskPaths; |
Chris Dalton | 4da7019 | 2018-06-18 09:51:36 -0600 | [diff] [blame] | 21 | class GrOnFlushResourceProvider; |
Robert Phillips | 6f0e02f | 2019-02-13 11:02:28 -0500 | [diff] [blame] | 22 | class GrRecordingContext; |
Chris Dalton | 5ba36ba | 2018-05-09 01:08:38 -0600 | [diff] [blame] | 23 | |
| 24 | /** |
| 25 | * This is the Op that draws paths to the actual canvas, using atlases generated by CCPR. |
| 26 | */ |
class GrCCDrawPathsOp : public GrDrawOp {
public:
    DEFINE_OP_CLASS_ID
    SK_DECLARE_INTERNAL_LLIST_INTERFACE(GrCCDrawPathsOp);

    // Factory: creates an op that draws the given shape under the given matrix/paint, clipped to
    // clipIBounds. Returns nullptr on failure (NOTE(review): inferred from the unique_ptr return;
    // confirm against the .cpp).
    static std::unique_ptr<GrCCDrawPathsOp> Make(GrRecordingContext*, const SkIRect& clipIBounds,
                                                 const SkMatrix&, const GrShape&, GrPaint&&);
    ~GrCCDrawPathsOp() override;

    const char* name() const override { return "GrCCDrawPathsOp"; }
    FixedFunctionFlags fixedFunctionFlags() const override { return FixedFunctionFlags::kNone; }
    GrProcessorSet::Analysis finalize(const GrCaps&, const GrAppliedClip*,
                                      bool hasMixedSampledCoverage, GrClampType) override;
    CombineResult onCombineIfPossible(GrOp*, GrRecordingContext::Arenas*, const GrCaps&) override;
    // Visits every atlas proxy recorded for this op's instance ranges (atlases are sampled
    // without mipmaps, hence GrMipMapped::kNo), followed by any proxies referenced by the
    // paint's processor set.
    void visitProxies(const VisitProxyFunc& fn) const override {
        for (const auto& range : fInstanceRanges) {
            fn(range.fAtlasProxy, GrMipMapped::kNo);
        }
        fProcessors.visitProxies(fn);
    }
    void onPrepare(GrOpFlushState*) override;

    // Hands this op a ref on the per-opsTask path container it belongs to; see the note on
    // fOwningPerOpsTaskPaths below for the lifetime relationship with fDraws.
    void addToOwningPerOpsTaskPaths(sk_sp<GrCCPerOpsTaskPaths> owningPerOpsTaskPaths);

    // Makes decisions about how to draw each path (cached, copied, rendered, etc.), and
    // increments/fills out the corresponding GrCCPerFlushResourceSpecs.
    void accountForOwnPaths(GrCCPathCache*, GrOnFlushResourceProvider*, GrCCPerFlushResourceSpecs*);

    // Allows the caller to decide whether to actually do the suggested copies from cached 16-bit
    // coverage count atlases, and into 8-bit literal coverage atlases. Purely to save space.
    enum class DoCopiesToA8Coverage : bool {
        kNo = false,
        kYes = true
    };

    // Allocates the GPU resources indicated by accountForOwnPaths(), in preparation for drawing. If
    // DoCopiesToA8Coverage is kNo, the paths slated for copy will instead be left in their 16-bit
    // coverage count atlases.
    //
    // NOTE: If using DoCopiesToA8Coverage::kNo, it is the caller's responsibility to have called
    // cancelCopies() on the GrCCPerFlushResourceSpecs, prior to making this call.
    void setupResources(GrCCPathCache*, GrOnFlushResourceProvider*, GrCCPerFlushResources*,
                        DoCopiesToA8Coverage);

    void onExecute(GrOpFlushState*, const SkRect& chainBounds) override;

private:
    // Intentionally a no-op: this op records its draw data during the onPrepare/flush path
    // above rather than pre-preparing at record time.
    void onPrePrepare(GrRecordingContext*,
                      const GrSurfaceProxyView* writeView,
                      GrAppliedClip*,
                      const GrXferProcessor::DstProxyView&) override {}

    // The memory pool constructs ops directly, so it needs access to the private ctor.
    friend class GrOpMemoryPool;

    // Shared implementation behind Make(); carries the extra geometry derived there
    // (stroke device width and conservative device bounds).
    static std::unique_ptr<GrCCDrawPathsOp> InternalMake(GrRecordingContext*,
                                                         const SkIRect& clipIBounds,
                                                         const SkMatrix&, const GrShape&,
                                                         float strokeDevWidth,
                                                         const SkRect& conservativeDevBounds,
                                                         GrPaint&&);

    GrCCDrawPathsOp(const SkMatrix&, const GrShape&, float strokeDevWidth,
                    const SkIRect& shapeConservativeIBounds, const SkIRect& maskDevIBounds,
                    const SkRect& conservativeDevBounds, GrPaint&&);

    // Appends an InstanceRange ending at instanceIdx for the given atlas/coverage mode
    // (NOTE(review): inferred from InstanceRange's fields; confirm against the .cpp).
    void recordInstance(
            GrCCPathProcessor::CoverageMode, GrTextureProxy* atlasProxy, int instanceIdx);

    // View matrix used to derive local coords; presumably identity/unused when the paint does
    // not require local coordinates — confirm against the .cpp.
    const SkMatrix fViewMatrixIfUsingLocalCoords;

    // One path draw recorded by this op. Multiple SingleDraws accumulate in fDraws as ops are
    // chained via onCombineIfPossible.
    class SingleDraw {
    public:
        SingleDraw(const SkMatrix&, const GrShape&, float strokeDevWidth,
                   const SkIRect& shapeConservativeIBounds, const SkIRect& maskDevIBounds,
                   const SkPMColor4f&);

        // See the corresponding methods in GrCCDrawPathsOp.
        GrProcessorSet::Analysis finalize(
                const GrCaps&, const GrAppliedClip*, bool hasMixedSampledCoverage, GrClampType,
                GrProcessorSet*);
        void accountForOwnPath(GrCCPathCache*, GrOnFlushResourceProvider*,
                               GrCCPerFlushResourceSpecs*);
        void setupResources(GrCCPathCache*, GrOnFlushResourceProvider*, GrCCPerFlushResources*,
                            DoCopiesToA8Coverage, GrCCDrawPathsOp*);

    private:
        // Decides whether this draw's mask is worth caching (heuristic; see the .cpp).
        bool shouldCachePathMask(int maxRenderTargetSize) const;

        SkMatrix fMatrix;
        GrShape fShape;
        float fStrokeDevWidth;
        const SkIRect fShapeConservativeIBounds;
        SkIRect fMaskDevIBounds;
        SkPMColor4f fColor;

        // Cached-mask state, filled out when this draw hits (or populates) the path cache.
        GrCCPathCache::OnFlushEntryRef fCacheEntry;
        SkIVector fCachedMaskShift;
        bool fDoCopyToA8Coverage = false;
        bool fDoCachePathMask = false;
        // Debug-only: guards against double-counting this draw as a render in the flush specs.
        SkDEBUGCODE(bool fWasCountedAsRender = false);

        // Intrusive singly-linked list hook; managed by GrCCSTLList.
        SingleDraw* fNext = nullptr;

        friend class GrCCSTLList<SingleDraw>;  // To access fNext.
    };

    // Declare fOwningPerOpsTaskPaths first, before fDraws. The draws use memory allocated by
    // fOwningPerOpsTaskPaths, so it must not be unreffed until after fDraws is destroyed.
    sk_sp<GrCCPerOpsTaskPaths> fOwningPerOpsTaskPaths;

    GrCCSTLList<SingleDraw> fDraws;
    // Debug-only count of entries in fDraws (starts at 1; presumably grows when ops combine).
    SkDEBUGCODE(int fNumDraws = 1);

    GrProcessorSet fProcessors;

    // A contiguous run of path instances that all sample the same atlas with the same
    // coverage mode. fEndInstanceIdx is the (exclusive) end index — NOTE(review): inferred
    // from the name; confirm against the .cpp.
    struct InstanceRange {
        GrCCPathProcessor::CoverageMode fCoverageMode;
        GrTextureProxy* fAtlasProxy;
        int fEndInstanceIdx;
    };

    SkSTArray<2, InstanceRange, true> fInstanceRanges;
    // First instance index in the per-flush buffer; -1 in debug builds until assigned.
    int fBaseInstance SkDEBUGCODE(= -1);
};
| 151 | |
| 152 | #endif |