/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
| 7 | |
| 8 | #ifndef GrCoverageCountingPathRenderer_DEFINED |
| 9 | #define GrCoverageCountingPathRenderer_DEFINED |
| 10 | |
| 11 | #include "GrAllocator.h" |
| 12 | #include "GrOnFlushResourceProvider.h" |
| 13 | #include "GrPathRenderer.h" |
| 14 | #include "SkTInternalLList.h" |
| 15 | #include "ccpr/GrCCPRAtlas.h" |
Chris Dalton | c1e5963 | 2017-09-05 00:30:07 -0600 | [diff] [blame] | 16 | #include "ccpr/GrCCPRCoverageOp.h" |
Chris Dalton | 1a325d2 | 2017-07-14 15:17:41 -0600 | [diff] [blame] | 17 | #include "ops/GrDrawOp.h" |
| 18 | #include <map> |
| 19 | |
/**
 * This is a path renderer that draws antialiased paths by counting coverage in an offscreen
 * buffer. (See GrCCPRCoverageProcessor, GrCCPRPathProcessor)
 *
 * It also serves as the per-render-target tracker for pending path draws, and at the start of
 * flush, it compiles GPU buffers and renders a "coverage count atlas" for the upcoming paths.
 */
class GrCoverageCountingPathRenderer
    : public GrPathRenderer
    , public GrOnFlushCallbackObject {

    // Per-render-target bookkeeping for path draws recorded since the last flush.
    // Defined in the private section below; forward-declared here so DrawPathsOp can
    // hold a back-pointer to it.
    struct RTPendingOps;

public:
    // True if the given caps allow this path renderer to be used at all.
    static bool IsSupported(const GrCaps&);

    // Returns a new renderer if IsSupported(), otherwise null.
    static sk_sp<GrCoverageCountingPathRenderer> CreateIfSupported(const GrCaps&);

    // GrPathRenderer overrides.
    StencilSupport onGetStencilSupport(const GrShape&) const override {
        // Coverage counting resolves coverage analytically in an offscreen atlas; it does
        // not interoperate with the stencil buffer.
        return GrPathRenderer::kNoSupport_StencilSupport;
    }
    CanDrawPath onCanDrawPath(const CanDrawPathArgs& args) const override;
    bool onDrawPath(const DrawPathArgs&) final;

    // GrOnFlushCallbackObject overrides.
    //
    // preFlush builds the per-flush GPU buffers and coverage-count atlases for every path
    // recorded against the op lists being flushed (see setupPerFlushResources below).
    void preFlush(GrOnFlushResourceProvider*, const uint32_t* opListIDs, int numOpListIDs,
                  SkTArray<sk_sp<GrRenderTargetContext>>* results) override;
    // postFlush releases/resets the per-flush state once the flush has completed.
    void postFlush(GrDrawOpUploadToken) override;

    // This is the Op that ultimately draws a path into its final destination, using the atlas we
    // generate at flush time.
    class DrawPathsOp : public GrDrawOp {
    public:
        DEFINE_OP_CLASS_ID
        SK_DECLARE_INTERNAL_LLIST_INTERFACE(DrawPathsOp);

        DrawPathsOp(GrCoverageCountingPathRenderer*, const DrawPathArgs&, GrColor);

        const char* name() const override { return "GrCoverageCountingPathRenderer::DrawPathsOp"; }

        void visitProxies(const VisitProxyFunc& func) const override {
            fProcessors.visitProxies(func);
        }

        // GrDrawOp overrides.
        FixedFunctionFlags fixedFunctionFlags() const override { return FixedFunctionFlags::kNone; }
        RequiresDstTexture finalize(const GrCaps&, const GrAppliedClip*,
                                    GrPixelConfigIsClamped) override;
        void wasRecorded(GrRenderTargetOpList*) override;
        // Merges another DrawPathsOp's draws into this op's linked list when possible
        // (see onCombineIfPossible in the .cpp).
        bool onCombineIfPossible(GrOp* other, const GrCaps& caps) override;
        // All GPU-buffer preparation happens centrally in the renderer's preFlush, so the
        // per-op prepare step is intentionally a no-op.
        void onPrepare(GrOpFlushState*) override {}
        void onExecute(GrOpFlushState*) override;

    private:
        // Fill type shared by every draw in this op; read from the head draw.
        // (The assert only guarantees at least one draw exists — presumably combined ops
        // all share one fill type; confirm against onCombineIfPossible in the .cpp.)
        SkPath::FillType getFillType() const {
            SkASSERT(fDebugInstanceCount >= 1);
            return fHeadDraw.fPath.getFillType();
        }

        // One recorded path draw. Ops chain multiple draws together via fNext
        // (nodes allocated out of RTPendingOps::fDrawsAllocator).
        struct SingleDraw {
            using ScissorMode = GrCCPRCoverageOpsBuilder::ScissorMode;
            SkIRect fClipBounds;      // device-space clip bounds for this draw
            ScissorMode fScissorMode; // whether atlas rendering needs a scissor
            SkMatrix fMatrix;         // view matrix for the path
            SkPath fPath;
            GrColor fColor;
            SingleDraw* fNext = nullptr; // next draw in this op's singly-linked list
        };

        // Accessor for the single draw in an op that has not been combined with others;
        // asserts the list really does contain exactly one entry.
        SingleDraw& getOnlyPathDraw() {
            SkASSERT(&fHeadDraw == fTailDraw);
            SkASSERT(1 == fDebugInstanceCount);
            return fHeadDraw;
        }

        // A contiguous run of this op's instances that were placed in the same atlas.
        // fEndInstanceIdx is exclusive; the run begins where the previous batch ended
        // (or at fBaseInstance for the first batch).
        struct AtlasBatch {
            const GrCCPRAtlas* fAtlas;
            int fEndInstanceIdx;
        };

        void addAtlasBatch(const GrCCPRAtlas* atlas, int endInstanceIdx) {
            // Batches must be appended in strictly increasing instance order.
            SkASSERT(endInstanceIdx > fBaseInstance);
            SkASSERT(fAtlasBatches.empty() ||
                     endInstanceIdx > fAtlasBatches.back().fEndInstanceIdx);
            fAtlasBatches.push_back() = {atlas, endInstanceIdx};
        }

        GrCoverageCountingPathRenderer* const fCCPR; // owning renderer (not owned by the op)
        const uint32_t fSRGBFlags;
        GrProcessorSet fProcessors;
        SingleDraw fHeadDraw;                 // first draw, stored inline in the op
        SingleDraw* fTailDraw;                // last draw in the list (== &fHeadDraw when single)
        RTPendingOps* fOwningRTPendingOps;    // back-pointer to the render target's pending ops
        int fBaseInstance;                    // first index in the per-flush instance buffer
        SkDEBUGCODE(int fDebugInstanceCount;)     // debug-only: number of draws in the list
        SkDEBUGCODE(int fDebugSkippedInstances;)  // debug-only: draws dropped at flush time
        SkSTArray<1, AtlasBatch, true> fAtlasBatches;

        friend class GrCoverageCountingPathRenderer;

        typedef GrDrawOp INHERITED;
    };

private:
    // Instantiated only through CreateIfSupported().
    GrCoverageCountingPathRenderer() = default;

    // Builds the per-flush index/vertex/instance buffers and atlases for the given op
    // lists; invoked from preFlush. Appends any atlas render target contexts to 'results'.
    void setupPerFlushResources(GrOnFlushResourceProvider*, const uint32_t* opListIDs,
                                int numOpListIDs, SkTArray<sk_sp<GrRenderTargetContext>>* results);

    // Paths recorded against one render target, awaiting the next flush.
    struct RTPendingOps {
        SkTInternalLList<DrawPathsOp> fOpList;
        // Running tallies used to size the per-flush buffers.
        int fNumTotalPaths = 0;
        int fNumSkPoints = 0;
        int fNumSkVerbs = 0;
        GrSTAllocator<256, DrawPathsOp::SingleDraw> fDrawsAllocator;
    };

    // Map from render target ID to the individual render target's pending path ops.
    std::map<uint32_t, RTPendingOps> fRTPendingOpsMap;

    // GPU resources built in preFlush and consumed by DrawPathsOp::onExecute.
    sk_sp<GrBuffer> fPerFlushIndexBuffer;
    sk_sp<GrBuffer> fPerFlushVertexBuffer;
    sk_sp<GrBuffer> fPerFlushInstanceBuffer;
    GrSTAllocator<4, GrCCPRAtlas> fPerFlushAtlases;
    // Whether the per-flush resources above were built successfully — presumably set by
    // setupPerFlushResources; confirm in the .cpp.
    bool fPerFlushResourcesAreValid;
    SkDEBUGCODE(bool fFlushing = false;) // debug-only: guards against re-entrant flush
};
| 147 | |
| 148 | #endif |