/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCCPerFlushResources_DEFINED
#define GrCCPerFlushResources_DEFINED

#include "src/gpu/GrNonAtomicRef.h"
#include "src/gpu/ccpr/GrCCAtlas.h"
#include "src/gpu/ccpr/GrCCFiller.h"
#include "src/gpu/ccpr/GrCCPathProcessor.h"
#include "src/gpu/ccpr/GrCCStroker.h"
#include "src/gpu/ccpr/GrStencilAtlasOp.h"

class GrCCPathCache;
class GrCCPathCacheEntry;
class GrOctoBounds;
class GrOnFlushResourceProvider;
class GrShape;

/**
 * This struct counts values that help us preallocate buffers for rendered path geometry.
 */
struct GrCCRenderedPathStats {
    int fMaxPointsPerPath = 0;
    int fNumTotalSkPoints = 0;
    int fNumTotalSkVerbs = 0;
    int fNumTotalConicWeights = 0;

    void statPath(const SkPath&);
};

/**
 * This struct encapsulates the minimum and desired requirements for the GPU resources that CCPR
 * needs in a given flush.
 */
struct GrCCPerFlushResourceSpecs {
    static constexpr int kFillIdx = 0;
    static constexpr int kStrokeIdx = 1;

    int fNumCachedPaths = 0;

    int fNumCopiedPaths[2] = {0, 0};
    GrCCRenderedPathStats fCopyPathStats[2];
    GrCCAtlas::Specs fCopyAtlasSpecs;

    int fNumRenderedPaths[2] = {0, 0};
    int fNumClipPaths = 0;
    GrCCRenderedPathStats fRenderedPathStats[2];
    GrCCAtlas::Specs fRenderedAtlasSpecs;

    bool isEmpty() const {
        return 0 == fNumCachedPaths + fNumCopiedPaths[kFillIdx] + fNumCopiedPaths[kStrokeIdx] +
                    fNumRenderedPaths[kFillIdx] + fNumRenderedPaths[kStrokeIdx] + fNumClipPaths;
    }
    // Converts the copies to normal cached draws.
    void cancelCopies();
};
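
// A minimal sketch of how a caller might populate these specs before a flush. This is
// hypothetical usage, not part of this header; "shape" and "path" are assumed to exist:
//
//     GrCCPerFlushResourceSpecs specs;
//     int idx = shape.style().strokeRec().isFillStyle() ? GrCCPerFlushResourceSpecs::kFillIdx
//                                                       : GrCCPerFlushResourceSpecs::kStrokeIdx;
//     ++specs.fNumRenderedPaths[idx];
//     specs.fRenderedPathStats[idx].statPath(path);
//     if (!specs.isEmpty()) {
//         // Allocate a GrCCPerFlushResources with these specs in preFlush().
//     }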

/**
 * This class wraps all the GPU resources that CCPR builds at flush time. It is allocated in CCPR's
 * preFlush() method, and referenced by all the GrCCPerOpsTaskPaths objects that are being flushed.
 * It is deleted in postFlush() once all the flushing GrCCPerOpsTaskPaths objects are deleted.
 */
class GrCCPerFlushResources : public GrNonAtomicRef<GrCCPerFlushResources> {
public:
    GrCCPerFlushResources(
            GrOnFlushResourceProvider*, GrCCAtlas::CoverageType, const GrCCPerFlushResourceSpecs&);

    bool isMapped() const { return SkToBool(fPathInstanceData); }

    GrCCAtlas::CoverageType renderedPathCoverageType() const {
        return fRenderedAtlasStack.coverageType();
    }

    // Copies a coverage-counted path out of the given texture proxy, and into a cached, 8-bit,
    // literal coverage atlas. Updates the cache entry to reference the new atlas.
    void upgradeEntryToLiteralCoverageAtlas(GrCCPathCache*, GrOnFlushResourceProvider*,
                                            GrCCPathCacheEntry*, GrFillRule);

    // These two methods render a path into a temporary coverage count atlas. See
    // GrCCPathProcessor::Instance for a description of the outputs.
    //
    // strokeDevWidth must be 0 for fills, 1 for hairlines, or the stroke width in device-space
    // pixels for non-hairline strokes (implicitly requiring a rigid-body transform).
    GrCCAtlas* renderShapeInAtlas(
            const SkIRect& clipIBounds, const SkMatrix&, const GrShape&, float strokeDevWidth,
            GrOctoBounds*, SkIRect* devIBounds, SkIVector* devToAtlasOffset);
    const GrCCAtlas* renderDeviceSpacePathInAtlas(
            const SkIRect& clipIBounds, const SkPath& devPath, const SkIRect& devPathIBounds,
            GrFillRule fillRule, SkIVector* devToAtlasOffset);
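
    // For reference, a hypothetical fill-path call might look as follows (a sketch only;
    // "resources", "clipIBounds", "viewMatrix", and "shape" are assumed to exist):
    //
    //     GrOctoBounds octoBounds;
    //     SkIRect devIBounds;
    //     SkIVector devToAtlasOffset;
    //     if (GrCCAtlas* atlas = resources->renderShapeInAtlas(
    //                 clipIBounds, viewMatrix, shape, /*strokeDevWidth=*/0,
    //                 &octoBounds, &devIBounds, &devToAtlasOffset)) {
    //         // The mask now lives in "atlas", translated by "devToAtlasOffset".
    //     }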
Chris Dalton5ba36ba2018-05-09 01:08:38 -060095
Chris Dalton9414c962018-06-14 10:14:50 -060096 // Returns the index in instanceBuffer() of the next instance that will be added by
97 // appendDrawPathInstance().
Chris Daltondaef06a2018-05-23 17:11:09 -060098 int nextPathInstanceIdx() const { return fNextPathInstanceIdx; }
Chris Dalton5ba36ba2018-05-09 01:08:38 -060099
Chris Dalton9414c962018-06-14 10:14:50 -0600100 // Appends an instance to instanceBuffer() that will draw a path to the destination render
101 // target. The caller is responsible to call set() on the returned instance, to keep track of
102 // its atlas and index (see nextPathInstanceIdx()), and to issue the actual draw call.
103 GrCCPathProcessor::Instance& appendDrawPathInstance() {
104 SkASSERT(this->isMapped());
105 SkASSERT(fNextPathInstanceIdx < fEndPathInstance);
106 return fPathInstanceData[fNextPathInstanceIdx++];
107 }
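
    // A sketch of the expected call pattern (hypothetical caller code; the arguments to
    // set() are elided here):
    //
    //     int idx = resources->nextPathInstanceIdx();
    //     GrCCPathProcessor::Instance& instance = resources->appendDrawPathInstance();
    //     instance.set(...);  // Record atlas coords, dev bounds, color, etc.
    //     // Remember "idx" and the current atlas so the draw op can reference this instance.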

    // Finishes off the GPU buffers and renders the atlas(es).
    bool finalize(GrOnFlushResourceProvider*);

    // Accessors used by draw calls, once the resources have been finalized.
    const GrCCFiller& filler() const { SkASSERT(!this->isMapped()); return fFiller; }
    const GrCCStroker& stroker() const { SkASSERT(!this->isMapped()); return fStroker; }
    sk_sp<const GrGpuBuffer> refIndexBuffer() const {
        SkASSERT(!this->isMapped());
        return fIndexBuffer;
    }
    sk_sp<const GrGpuBuffer> refVertexBuffer() const {
        SkASSERT(!this->isMapped());
        return fVertexBuffer;
    }
    sk_sp<const GrGpuBuffer> refInstanceBuffer() const {
        SkASSERT(!this->isMapped());
        return fInstanceBuffer;
    }
    sk_sp<const GrGpuBuffer> refStencilResolveBuffer() const {
        SkASSERT(!this->isMapped());
        return fStencilResolveBuffer;
    }

private:
    void recordCopyPathInstance(const GrCCPathCacheEntry&, const SkIVector& newAtlasOffset,
                                GrFillRule, sk_sp<GrTextureProxy> srcProxy);
    void placeRenderedPathInAtlas(
            const SkIRect& clippedPathIBounds, GrScissorTest, SkIVector* devToAtlasOffset);

    // In MSAA mode we record an additional instance per path that draws a rectangle on top of its
    // corresponding path in the atlas and resolves stencil winding values to coverage.
    void recordStencilResolveInstance(
            const SkIRect& clippedPathIBounds, const SkIVector& devToAtlasOffset, GrFillRule);

    const SkAutoSTArray<32, SkPoint> fLocalDevPtsBuffer;
    GrCCFiller fFiller;
    GrCCStroker fStroker;
    GrCCAtlasStack fCopyAtlasStack;
    GrCCAtlasStack fRenderedAtlasStack;

    const sk_sp<const GrGpuBuffer> fIndexBuffer;
    const sk_sp<const GrGpuBuffer> fVertexBuffer;
    const sk_sp<GrGpuBuffer> fInstanceBuffer;

    GrCCPathProcessor::Instance* fPathInstanceData = nullptr;
    int fNextCopyInstanceIdx;
    SkDEBUGCODE(int fEndCopyInstance);
    int fNextPathInstanceIdx;
    int fBasePathInstanceIdx;
    SkDEBUGCODE(int fEndPathInstance);

    // Represents a range of copy-path instances that all share the same source proxy (i.e., draw
    // instances that copy a path mask from a 16-bit coverage count atlas into an 8-bit literal
    // coverage atlas).
    struct CopyPathRange {
        CopyPathRange() = default;
        CopyPathRange(sk_sp<GrTextureProxy> srcProxy, int count)
                : fSrcProxy(std::move(srcProxy)), fCount(count) {}
        sk_sp<GrTextureProxy> fSrcProxy;
        int fCount;
    };

    SkSTArray<4, CopyPathRange> fCopyPathRanges;
    int fCurrCopyAtlasRangesIdx = 0;

    // This is a list of coverage count atlas textures that have been invalidated because we copied
    // their paths into new 8-bit literal coverage atlases. Since copying is finished by the time
    // we begin rendering new atlases, we can recycle these textures for the rendered atlases rather
    // than allocating new texture objects upon instantiation.
    SkSTArray<2, sk_sp<GrTexture>> fRecyclableAtlasTextures;

    // Used in MSAA mode to make an intermediate draw that resolves stencil winding values to
    // coverage.
    sk_sp<GrGpuBuffer> fStencilResolveBuffer;
    GrStencilAtlasOp::ResolveRectInstance* fStencilResolveInstanceData = nullptr;
    int fNextStencilResolveInstanceIdx = 0;
    SkDEBUGCODE(int fEndStencilResolveInstance);

public:
#ifdef SK_DEBUG
    void debugOnly_didReuseRenderedPath() {
        if (GrCCAtlas::CoverageType::kA8_Multisample == this->renderedPathCoverageType()) {
            --fEndStencilResolveInstance;
        }
    }
#endif
    const GrTexture* testingOnly_frontCopyAtlasTexture() const;
    const GrTexture* testingOnly_frontRenderedAtlasTexture() const;
};

inline void GrCCRenderedPathStats::statPath(const SkPath& path) {
    fMaxPointsPerPath = SkTMax(fMaxPointsPerPath, path.countPoints());
    fNumTotalSkPoints += path.countPoints();
    fNumTotalSkVerbs += path.countVerbs();
    fNumTotalConicWeights += SkPathPriv::ConicWeightCnt(path);
}
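
// For illustration: callers would typically stat every path slated for rendering, then use the
// totals to preallocate buffers (hypothetical usage; "pathsToRender" is assumed to exist):
//
//     GrCCRenderedPathStats stats;
//     for (const SkPath& path : pathsToRender) {
//         stats.statPath(path);
//     }
//     // stats.fNumTotalSkPoints, fNumTotalSkVerbs, etc. now bound the required buffer sizes.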

#endif