/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCoverageCountingPathRenderer_DEFINED
#define GrCoverageCountingPathRenderer_DEFINED

#include "GrAllocator.h"
#include "GrOnFlushResourceProvider.h"
#include "GrPathRenderer.h"
#include "SkTInternalLList.h"
#include "ccpr/GrCCPRAtlas.h"
#include "ccpr/GrCCPRCoverageOp.h"
#include "ops/GrDrawOp.h"
#include <map>

/**
 * This is a path renderer that draws antialiased paths by counting coverage in an offscreen
 * buffer. (See GrCCPRCoverageProcessor, GrCCPRPathProcessor)
 *
 * It also serves as the per-render-target tracker for pending path draws, and at the start of
 * flush, it compiles GPU buffers and renders a "coverage count atlas" for the upcoming paths.
 */
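// A rough usage sketch: create the renderer with CreateIfSupported() and register the same
// object both as a path renderer and as an onFlush callback, so it can build its atlases in
// preFlush(). "caps", "pathRendererChain", and "drawingManager" are hypothetical placeholders
// for whatever objects own those responsibilities in the caller:
//
//     if (sk_sp<GrCoverageCountingPathRenderer> ccpr =
//                 GrCoverageCountingPathRenderer::CreateIfSupported(caps)) {
//         pathRendererChain->addPathRenderer(ccpr);               // hypothetical hook
//         drawingManager->addOnFlushCallbackObject(ccpr.get());   // hypothetical hook
//     }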
class GrCoverageCountingPathRenderer
        : public GrPathRenderer
        , public GrOnFlushCallbackObject {

    struct RTPendingOps;

public:
    static bool IsSupported(const GrCaps&);
    static sk_sp<GrCoverageCountingPathRenderer> CreateIfSupported(const GrCaps&);

    // GrPathRenderer overrides.
    StencilSupport onGetStencilSupport(const GrShape&) const override {
        return GrPathRenderer::kNoSupport_StencilSupport;
    }
    CanDrawPath onCanDrawPath(const CanDrawPathArgs& args) const override;
    bool onDrawPath(const DrawPathArgs&) final;

    // GrOnFlushCallbackObject overrides.
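    // preFlush() is where this class compiles the per-flush GPU buffers and renders the
    // coverage count atlases for the paths in the given opLists; postFlush() is the natural
    // point to drop those per-flush resources again (see the fPerFlush* members below).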
    void preFlush(GrOnFlushResourceProvider*, const uint32_t* opListIDs, int numOpListIDs,
                  SkTArray<sk_sp<GrRenderTargetContext>>* results) override;
    void postFlush(GrDrawOpUploadToken) override;

    // This is the Op that ultimately draws a path into its final destination, using the atlas we
    // generate at flush time.
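    // (Expected lifecycle, as suggested by the declarations below: onDrawPath() records a
    // DrawPathsOp, wasRecorded() links it into its render target's RTPendingOps list, and at
    // flush time onExecute() draws the paths out of the per-flush atlases built in preFlush().)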
    class DrawPathsOp : public GrDrawOp {
    public:
        DEFINE_OP_CLASS_ID
        SK_DECLARE_INTERNAL_LLIST_INTERFACE(DrawPathsOp);

        DrawPathsOp(GrCoverageCountingPathRenderer*, const DrawPathArgs&, GrColor);

        const char* name() const override { return "GrCoverageCountingPathRenderer::DrawPathsOp"; }

        void visitProxies(const VisitProxyFunc& func) const override {
            fProcessors.visitProxies(func);
        }

        // GrDrawOp overrides.
        FixedFunctionFlags fixedFunctionFlags() const override { return FixedFunctionFlags::kNone; }
        RequiresDstTexture finalize(const GrCaps&, const GrAppliedClip*,
                                    GrPixelConfigIsClamped) override;
        void wasRecorded(GrRenderTargetOpList*) override;
        bool onCombineIfPossible(GrOp* other, const GrCaps& caps) override;
        void onPrepare(GrOpFlushState*) override {}
        void onExecute(GrOpFlushState*) override;

    private:
        SkPath::FillType getFillType() const {
            SkASSERT(fDebugInstanceCount >= 1);
            return fHeadDraw.fPath.getFillType();
        }

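        // A single path draw as recorded by onDrawPath(). When ops combine, their SingleDraws
        // are chained through fNext; additional draws are allocated out of the owning
        // RTPendingOps' fDrawsAllocator.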
        struct SingleDraw {
            using ScissorMode = GrCCPRCoverageOpsBuilder::ScissorMode;
            SkIRect fClipBounds;
            ScissorMode fScissorMode;
            SkMatrix fMatrix;
            SkPath fPath;
            GrColor fColor;
            SingleDraw* fNext = nullptr;
        };

        SingleDraw& getOnlyPathDraw() {
            SkASSERT(&fHeadDraw == fTailDraw);
            SkASSERT(1 == fDebugInstanceCount);
            return fHeadDraw;
        }

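        // A contiguous run of path instances that all read from the same atlas; the run ends
        // (exclusively) at fEndInstanceIdx and begins where the previous batch ended (or at
        // fBaseInstance for the first batch).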
        struct AtlasBatch {
            const GrCCPRAtlas* fAtlas;
            int fEndInstanceIdx;
        };

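        // Appends a batch for "atlas"; end indices must increase monotonically across batches.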
        void addAtlasBatch(const GrCCPRAtlas* atlas, int endInstanceIdx) {
            SkASSERT(endInstanceIdx > fBaseInstance);
            SkASSERT(fAtlasBatches.empty() ||
                     endInstanceIdx > fAtlasBatches.back().fEndInstanceIdx);
            fAtlasBatches.push_back() = {atlas, endInstanceIdx};
        }

        GrCoverageCountingPathRenderer* const fCCPR;
        const uint32_t fSRGBFlags;
        GrProcessorSet fProcessors;
        SingleDraw fHeadDraw;
        SingleDraw* fTailDraw;
        RTPendingOps* fOwningRTPendingOps;
        int fBaseInstance;
        SkDEBUGCODE(int fDebugInstanceCount;)
        SkDEBUGCODE(int fDebugSkippedInstances;)
        SkSTArray<1, AtlasBatch, true> fAtlasBatches;

        friend class GrCoverageCountingPathRenderer;

        typedef GrDrawOp INHERITED;
    };

private:
    GrCoverageCountingPathRenderer() = default;

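    // Builds the per-flush index/vertex/instance buffers and coverage count atlases for every
    // pending path in the given opLists; this is the bulk of the work behind preFlush().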
    void setupPerFlushResources(GrOnFlushResourceProvider*, const uint32_t* opListIDs,
                                int numOpListIDs, SkTArray<sk_sp<GrRenderTargetContext>>* results);

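    // All the pending DrawPathsOps, plus running path/point/verb counts, for a single render
    // target. Instances live in fRTPendingOpsMap below, keyed by render target ID.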
    struct RTPendingOps {
        SkTInternalLList<DrawPathsOp> fOpList;
        int fNumTotalPaths = 0;
        int fNumSkPoints = 0;
        int fNumSkVerbs = 0;
        GrSTAllocator<256, DrawPathsOp::SingleDraw> fDrawsAllocator;
    };

    // Map from render target ID to the individual render target's pending path ops.
    std::map<uint32_t, RTPendingOps> fRTPendingOpsMap;

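    // Per-flush GPU resources, (re)built by setupPerFlushResources() at the start of each flush.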
    sk_sp<GrBuffer> fPerFlushIndexBuffer;
    sk_sp<GrBuffer> fPerFlushVertexBuffer;
    sk_sp<GrBuffer> fPerFlushInstanceBuffer;
    GrSTAllocator<4, GrCCPRAtlas> fPerFlushAtlases;
    bool fPerFlushResourcesAreValid;
    SkDEBUGCODE(bool fFlushing = false;)
};

#endif