/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCCPerFlushResources_DEFINED
#define GrCCPerFlushResources_DEFINED

#include "GrNonAtomicRef.h"
#include "ccpr/GrCCAtlas.h"
#include "ccpr/GrCCFiller.h"
#include "ccpr/GrCCStroker.h"
#include "ccpr/GrCCPathProcessor.h"

class GrCCPathCacheEntry;
class GrOnFlushResourceProvider;
class GrShape;

/**
 * This struct counts values that help us preallocate buffers for rendered path geometry.
 */
struct GrCCRenderedPathStats {
    int fMaxPointsPerPath = 0;
    int fNumTotalSkPoints = 0;
    int fNumTotalSkVerbs = 0;
    int fNumTotalConicWeights = 0;

    void statPath(const SkPath&);
};
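
// A minimal sketch of the intended use (purely illustrative; 'paths' is a hypothetical
// collection):
//
//     GrCCRenderedPathStats stats;
//     for (const SkPath& path : paths) {
//         stats.statPath(path);
//     }
//     // The accumulated totals can then be used to size the fill/stroke geometry buffers.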

/**
 * This struct encapsulates the minimum and desired GPU resource requirements for CCPR in a given
 * flush.
 */
struct GrCCPerFlushResourceSpecs {
    static constexpr int kFillIdx = 0;
    static constexpr int kStrokeIdx = 1;

    int fNumCachedPaths = 0;

    int fNumCopiedPaths[2] = {0, 0};
    GrCCRenderedPathStats fCopyPathStats[2];
    GrCCAtlas::Specs fCopyAtlasSpecs;

    int fNumRenderedPaths[2] = {0, 0};
    int fNumClipPaths = 0;
    GrCCRenderedPathStats fRenderedPathStats[2];
    GrCCAtlas::Specs fRenderedAtlasSpecs;

    bool isEmpty() const {
        return 0 == fNumCachedPaths + fNumCopiedPaths[kFillIdx] + fNumCopiedPaths[kStrokeIdx] +
                    fNumRenderedPaths[kFillIdx] + fNumRenderedPaths[kStrokeIdx] + fNumClipPaths;
    }
    void convertCopiesToRenders();
};
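
// Rough sketch of how a caller might populate the specs before a flush and then build the
// per-flush resources (names like 'onFlushRP', 'skPath', and 'isStroke' are illustrative, not
// taken from the CCPR call sites):
//
//     GrCCPerFlushResourceSpecs specs;
//     int idx = isStroke ? GrCCPerFlushResourceSpecs::kStrokeIdx
//                        : GrCCPerFlushResourceSpecs::kFillIdx;
//     ++specs.fNumRenderedPaths[idx];
//     specs.fRenderedPathStats[idx].statPath(skPath);
//     ...
//     if (!specs.isEmpty()) {
//         auto resources = sk_make_sp<GrCCPerFlushResources>(onFlushRP, specs);
//     }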

/**
 * This class wraps all the GPU resources that CCPR builds at flush time. It is allocated in CCPR's
 * preFlush() method, and referenced by all the GrCCPerOpListPaths objects that are being flushed.
 * It is deleted in postFlush() once all the flushing GrCCPerOpListPaths objects are deleted.
 */
class GrCCPerFlushResources : public GrNonAtomicRef<GrCCPerFlushResources> {
public:
    GrCCPerFlushResources(GrOnFlushResourceProvider*, const GrCCPerFlushResourceSpecs&);

    bool isMapped() const { return SkToBool(fPathInstanceData); }

    // Copies a path out of the previous flush's stashed mainline coverage count atlas, and into a
    // cached, 8-bit, literal-coverage atlas. The actual source texture to copy from will be
    // provided at the time finalize() is called.
    GrCCAtlas* copyPathToCachedAtlas(const GrCCPathCacheEntry&, GrCCPathProcessor::DoEvenOddFill,
                                     SkIVector* newAtlasOffset);
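
    // Hedged sketch of a call site ('entry', 'evenOdd', and the follow-up bookkeeping are
    // placeholders; the real logic lives in the path cache code):
    //
    //     SkIVector newAtlasOffset;
    //     if (GrCCAtlas* atlas =
    //                 resources->copyPathToCachedAtlas(entry, evenOdd, &newAtlasOffset)) {
    //         // Record 'atlas' and 'newAtlasOffset' so the cached entry points at its new home.
    //     }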

    // These two methods render a path into a temporary coverage count atlas. See
    // GrCCPathProcessor::Instance for a description of the outputs. The returned atlases are
    // "const" to prevent the caller from assigning a unique key.
    //
    // strokeDevWidth must be 0 for fills, 1 for hairlines, or the stroke width in device-space
    // pixels for non-hairline strokes (implicitly requiring a rigid-body transform).
    const GrCCAtlas* renderShapeInAtlas(const SkIRect& clipIBounds, const SkMatrix&, const GrShape&,
                                        float strokeDevWidth, SkRect* devBounds,
                                        SkRect* devBounds45, SkIRect* devIBounds,
                                        SkIVector* devToAtlasOffset);
    const GrCCAtlas* renderDeviceSpacePathInAtlas(const SkIRect& clipIBounds, const SkPath& devPath,
                                                  const SkIRect& devPathIBounds,
                                                  SkIVector* devToAtlasOffset);
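
    // Illustrative sketch only ('viewMatrix', 'shape', and 'strokeDevWidth' are placeholders): the
    // caller keeps the returned atlas and out-params so it can later fill in a draw instance.
    //
    //     SkRect devBounds, devBounds45;
    //     SkIRect devIBounds;
    //     SkIVector devToAtlasOffset;
    //     if (const GrCCAtlas* atlas = resources->renderShapeInAtlas(
    //                 clipIBounds, viewMatrix, shape, strokeDevWidth, &devBounds, &devBounds45,
    //                 &devIBounds, &devToAtlasOffset)) {
    //         // The shape now occupies a region of 'atlas' at 'devToAtlasOffset'.
    //     }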

    // Returns the index in instanceBuffer() of the next instance that will be added by
    // appendDrawPathInstance().
    int nextPathInstanceIdx() const { return fNextPathInstanceIdx; }

    // Appends an instance to instanceBuffer() that will draw a path to the destination render
    // target. The caller is responsible for calling set() on the returned instance, for keeping
    // track of its atlas and index (see nextPathInstanceIdx()), and for issuing the actual draw
    // call.
    GrCCPathProcessor::Instance& appendDrawPathInstance() {
        SkASSERT(this->isMapped());
        SkASSERT(fNextPathInstanceIdx < fEndPathInstance);
        return fPathInstanceData[fNextPathInstanceIdx++];
    }
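
    // Expected call pattern, sketched (the exact set() parameters are defined by
    // GrCCPathProcessor::Instance and are not spelled out here):
    //
    //     int idx = resources->nextPathInstanceIdx();
    //     resources->appendDrawPathInstance().set(/* geometry, atlas offset, color, fill rule */);
    //     // Remember 'idx' and the atlas so the draw op can bind the right texture and range.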

    // Finishes off the GPU buffers and renders the atlas(es). 'stashedAtlasProxy', if provided, is
    // the mainline coverage count atlas from the previous flush. It will be used as the source
    // texture for any copies set up by copyPathToCachedAtlas().
    bool finalize(GrOnFlushResourceProvider*, sk_sp<GrTextureProxy> stashedAtlasProxy,
                  SkTArray<sk_sp<GrRenderTargetContext>>* out);
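
    // Sketch of the call made at the end of a flush (surrounding names are hypothetical):
    //
    //     SkTArray<sk_sp<GrRenderTargetContext>> atlasDraws;
    //     if (!resources->finalize(onFlushRP, std::move(stashedAtlasProxy), &atlasDraws)) {
    //         return;  // Mapping/instantiation failed; drop this flush's CCPR work.
    //     }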

    // Accessors used by draw calls, once the resources have been finalized.
    const GrCCFiller& filler() const { SkASSERT(!this->isMapped()); return fFiller; }
    const GrCCStroker& stroker() const { SkASSERT(!this->isMapped()); return fStroker; }
    const GrBuffer* indexBuffer() const { SkASSERT(!this->isMapped()); return fIndexBuffer.get(); }
    const GrBuffer* vertexBuffer() const { SkASSERT(!this->isMapped()); return fVertexBuffer.get();}
    GrBuffer* instanceBuffer() const { SkASSERT(!this->isMapped()); return fInstanceBuffer.get(); }

    // Returns the mainline coverage count atlas that the client may stash for the next flush, if
    // any. The caller is responsible for calling getOrAssignUniqueKey() on this atlas if they wish
    // to actually stash it in order to copy paths into cached atlases.
    GrCCAtlas* nextAtlasToStash() {
        return fRenderedAtlasStack.empty() ? nullptr : &fRenderedAtlasStack.front();
    }

    // Returns true if the client has called getOrAssignUniqueKey() on our nextAtlasToStash().
    bool hasStashedAtlas() const {
        return !fRenderedAtlasStack.empty() && fRenderedAtlasStack.front().uniqueKey().isValid();
    }
    const GrUniqueKey& stashedAtlasKey() const {
        SkASSERT(this->hasStashedAtlas());
        return fRenderedAtlasStack.front().uniqueKey();
    }
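
    // Hedged sketch of the stashing handshake performed by the owning path renderer at the end of
    // a flush (the getOrAssignUniqueKey() call on GrCCAtlas is assumed from the comments above):
    //
    //     if (GrCCAtlas* atlas = resources->nextAtlasToStash()) {
    //         atlas->getOrAssignUniqueKey(onFlushRP);  // Opt in to stashing this atlas.
    //     }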

private:
    bool placeRenderedPathInAtlas(const SkIRect& clipIBounds, const SkIRect& pathIBounds,
                                  GrScissorTest*, SkIRect* clippedPathIBounds,
                                  SkIVector* devToAtlasOffset);

    const SkAutoSTArray<32, SkPoint> fLocalDevPtsBuffer;
    GrCCFiller fFiller;
    GrCCStroker fStroker;
    GrCCAtlasStack fCopyAtlasStack;
    GrCCAtlasStack fRenderedAtlasStack;

    const sk_sp<const GrBuffer> fIndexBuffer;
    const sk_sp<const GrBuffer> fVertexBuffer;
    const sk_sp<GrBuffer> fInstanceBuffer;

    GrCCPathProcessor::Instance* fPathInstanceData = nullptr;
    int fNextCopyInstanceIdx;
    SkDEBUGCODE(int fEndCopyInstance);
    int fNextPathInstanceIdx;
    SkDEBUGCODE(int fEndPathInstance);
};

inline void GrCCRenderedPathStats::statPath(const SkPath& path) {
    fMaxPointsPerPath = SkTMax(fMaxPointsPerPath, path.countPoints());
    fNumTotalSkPoints += path.countPoints();
    fNumTotalSkVerbs += path.countVerbs();
    fNumTotalConicWeights += SkPathPriv::ConicWeightCnt(path);
}

#endif