/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCCPerFlushResources_DEFINED
#define GrCCPerFlushResources_DEFINED

#include "src/gpu/GrNonAtomicRef.h"
#include "src/gpu/ccpr/GrCCAtlas.h"
#include "src/gpu/ccpr/GrCCFiller.h"
#include "src/gpu/ccpr/GrCCPathProcessor.h"
#include "src/gpu/ccpr/GrCCStroker.h"

class GrCCPathCache;
class GrCCPathCacheEntry;
class GrOctoBounds;
class GrOnFlushResourceProvider;
class GrShape;

/**
 * This struct counts values that help us preallocate buffers for rendered path geometry.
 */
struct GrCCRenderedPathStats {
    int fMaxPointsPerPath = 0;
    int fNumTotalSkPoints = 0;
    int fNumTotalSkVerbs = 0;
    int fNumTotalConicWeights = 0;

    void statPath(const SkPath&);
};

/**
 * This struct encapsulates the minimum and desired requirements for the GPU resources that CCPR
 * will need in a given flush.
 */
struct GrCCPerFlushResourceSpecs {
    static constexpr int kFillIdx = 0;
    static constexpr int kStrokeIdx = 1;

    int fNumCachedPaths = 0;

    int fNumCopiedPaths[2] = {0, 0};
    GrCCRenderedPathStats fCopyPathStats[2];
    GrCCAtlas::Specs fCopyAtlasSpecs;

    int fNumRenderedPaths[2] = {0, 0};
    int fNumClipPaths = 0;
    GrCCRenderedPathStats fRenderedPathStats[2];
    GrCCAtlas::Specs fRenderedAtlasSpecs;

    bool isEmpty() const {
        return 0 == fNumCachedPaths + fNumCopiedPaths[kFillIdx] + fNumCopiedPaths[kStrokeIdx] +
                    fNumRenderedPaths[kFillIdx] + fNumRenderedPaths[kStrokeIdx] + fNumClipPaths;
    }
    // Converts the copies to normal cached draws.
    void cancelCopies();
};
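
// A minimal usage sketch (not part of the API): before a flush, the caller tallies a spec entry
// for each path it plans to draw, then skips building per-flush resources entirely when nothing
// was recorded. The "pendingPaths" collection, its fields, and "onFlushRP" are hypothetical.
//
//     GrCCPerFlushResourceSpecs specs;
//     for (const PendingPath& pending : pendingPaths) {
//         int idx = pending.fIsStroke ? GrCCPerFlushResourceSpecs::kStrokeIdx
//                                     : GrCCPerFlushResourceSpecs::kFillIdx;
//         ++specs.fNumRenderedPaths[idx];
//         specs.fRenderedPathStats[idx].statPath(pending.fSkPath);
//     }
//     if (!specs.isEmpty()) {
//         auto resources = sk_make_sp<GrCCPerFlushResources>(onFlushRP, specs);
//     }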

/**
 * This class wraps all the GPU resources that CCPR builds at flush time. It is allocated in CCPR's
 * preFlush() method, and referenced by all the GrCCPerOpListPaths objects that are being flushed.
 * It is deleted in postFlush() once all the flushing GrCCPerOpListPaths objects are deleted.
 */
class GrCCPerFlushResources : public GrNonAtomicRef<GrCCPerFlushResources> {
public:
    GrCCPerFlushResources(GrOnFlushResourceProvider*, const GrCCPerFlushResourceSpecs&);

    bool isMapped() const { return SkToBool(fPathInstanceData); }

    // Copies a coverage-counted path out of the given texture proxy, and into a cached, 8-bit,
    // literal coverage atlas. Updates the cache entry to reference the new atlas.
    void upgradeEntryToLiteralCoverageAtlas(GrCCPathCache*, GrOnFlushResourceProvider*,
                                            GrCCPathCacheEntry*, GrCCPathProcessor::DoEvenOddFill);

    // These two methods render a path into a temporary coverage count atlas. See
    // GrCCPathProcessor::Instance for a description of the outputs.
    //
    // strokeDevWidth must be 0 for fills, 1 for hairlines, or the stroke width in device-space
    // pixels for non-hairline strokes (implicitly requiring a rigid-body transform).
    GrCCAtlas* renderShapeInAtlas(
            const SkIRect& clipIBounds, const SkMatrix&, const GrShape&, float strokeDevWidth,
            GrOctoBounds*, SkIRect* devIBounds, SkIVector* devToAtlasOffset);
    const GrCCAtlas* renderDeviceSpacePathInAtlas(const SkIRect& clipIBounds, const SkPath& devPath,
                                                  const SkIRect& devPathIBounds,
                                                  SkIVector* devToAtlasOffset);
Chris Dalton5ba36ba2018-05-09 01:08:38 -060089
Chris Dalton9414c962018-06-14 10:14:50 -060090 // Returns the index in instanceBuffer() of the next instance that will be added by
91 // appendDrawPathInstance().
Chris Daltondaef06a2018-05-23 17:11:09 -060092 int nextPathInstanceIdx() const { return fNextPathInstanceIdx; }
Chris Dalton5ba36ba2018-05-09 01:08:38 -060093
Chris Dalton9414c962018-06-14 10:14:50 -060094 // Appends an instance to instanceBuffer() that will draw a path to the destination render
95 // target. The caller is responsible to call set() on the returned instance, to keep track of
96 // its atlas and index (see nextPathInstanceIdx()), and to issue the actual draw call.
97 GrCCPathProcessor::Instance& appendDrawPathInstance() {
98 SkASSERT(this->isMapped());
99 SkASSERT(fNextPathInstanceIdx < fEndPathInstance);
100 return fPathInstanceData[fNextPathInstanceIdx++];
101 }
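
    // A hypothetical sketch of the per-flush flow for one rendered path (the "resources",
    // "clipIBounds", "viewMatrix", "shape", and "strokeDevWidth" variables are assumed to be
    // supplied by the caller; the arguments to set() are elided because they are path-specific):
    //
    //     GrOctoBounds octoBounds;
    //     SkIRect devIBounds;
    //     SkIVector devToAtlasOffset;
    //     if (GrCCAtlas* atlas = resources->renderShapeInAtlas(
    //                 clipIBounds, viewMatrix, shape, strokeDevWidth, &octoBounds, &devIBounds,
    //                 &devToAtlasOffset)) {
    //         int instanceIdx = resources->nextPathInstanceIdx();
    //         resources->appendDrawPathInstance().set(/* ... */);
    //         // Remember (atlas, instanceIdx) so the op can later draw this instance range.
    //     }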

    // Finishes off the GPU buffers and renders the atlas(es).
    bool finalize(GrOnFlushResourceProvider*, SkTArray<sk_sp<GrRenderTargetContext>>* out);

    // Accessors used by draw calls, once the resources have been finalized.
    const GrCCFiller& filler() const { SkASSERT(!this->isMapped()); return fFiller; }
    const GrCCStroker& stroker() const { SkASSERT(!this->isMapped()); return fStroker; }
    sk_sp<const GrGpuBuffer> refIndexBuffer() const {
        SkASSERT(!this->isMapped());
        return fIndexBuffer;
    }
    sk_sp<const GrGpuBuffer> refVertexBuffer() const {
        SkASSERT(!this->isMapped());
        return fVertexBuffer;
    }
    sk_sp<const GrGpuBuffer> refInstanceBuffer() const {
        SkASSERT(!this->isMapped());
        return fInstanceBuffer;
    }
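
    // Hypothetical draw-time sketch (after finalize() has returned true): a path-drawing op refs
    // the shared GPU buffers through these accessors and issues its instanced draw from them.
    //
    //     sk_sp<const GrGpuBuffer> indexBuffer    = resources->refIndexBuffer();
    //     sk_sp<const GrGpuBuffer> vertexBuffer   = resources->refVertexBuffer();
    //     sk_sp<const GrGpuBuffer> instanceBuffer = resources->refInstanceBuffer();
    //     // ... bind these on the op's mesh/pipeline and draw its range of path instances ...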

private:
    void recordCopyPathInstance(const GrCCPathCacheEntry&, const SkIVector& newAtlasOffset,
                                GrCCPathProcessor::DoEvenOddFill, sk_sp<GrTextureProxy> srcProxy);
    void placeRenderedPathInAtlas(const SkIRect& clippedPathIBounds, GrScissorTest,
                                  SkIVector* devToAtlasOffset);

    const SkAutoSTArray<32, SkPoint> fLocalDevPtsBuffer;
    GrCCFiller fFiller;
    GrCCStroker fStroker;
    GrCCAtlasStack fCopyAtlasStack;
    GrCCAtlasStack fRenderedAtlasStack;

    const sk_sp<const GrGpuBuffer> fIndexBuffer;
    const sk_sp<const GrGpuBuffer> fVertexBuffer;
    const sk_sp<GrGpuBuffer> fInstanceBuffer;

    GrCCPathProcessor::Instance* fPathInstanceData = nullptr;
    int fNextCopyInstanceIdx;
    SkDEBUGCODE(int fEndCopyInstance);
    int fNextPathInstanceIdx;
    SkDEBUGCODE(int fEndPathInstance);

    // Represents a range of copy-path instances that all share the same source proxy. (i.e., draw
    // instances that copy a path mask from a 16-bit coverage count atlas into an 8-bit literal
    // coverage atlas.)
    struct CopyPathRange {
        CopyPathRange() = default;
        CopyPathRange(sk_sp<GrTextureProxy> srcProxy, int count)
                : fSrcProxy(std::move(srcProxy)), fCount(count) {}
        sk_sp<GrTextureProxy> fSrcProxy;
        int fCount;
    };

    SkSTArray<4, CopyPathRange> fCopyPathRanges;
    int fCurrCopyAtlasRangesIdx = 0;

    // This is a list of coverage count atlas textures that have been invalidated because we copied
    // their paths into new 8-bit literal coverage atlases. Since copying is finished by the time
    // we begin rendering new atlases, we can recycle these textures for the rendered atlases rather
    // than allocating new texture objects upon instantiation.
    SkSTArray<2, sk_sp<GrTexture>> fRecyclableAtlasTextures;

public:
    const GrTexture* testingOnly_frontCopyAtlasTexture() const;
    const GrTexture* testingOnly_frontRenderedAtlasTexture() const;
};

inline void GrCCRenderedPathStats::statPath(const SkPath& path) {
    fMaxPointsPerPath = SkTMax(fMaxPointsPerPath, path.countPoints());
    fNumTotalSkPoints += path.countPoints();
    fNumTotalSkVerbs += path.countVerbs();
    fNumTotalConicWeights += SkPathPriv::ConicWeightCnt(path);
}
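
// A minimal sketch of how the stats above might be accumulated ahead of a flush ("pathsToRender"
// is a hypothetical list of SkPaths destined for the same atlas):
//
//     GrCCRenderedPathStats stats;
//     for (const SkPath& path : pathsToRender) {
//         stats.statPath(path);
//     }
//     // fMaxPointsPerPath and the totals now bound how much point/verb/weight storage the
//     // filler and stroker need to preallocate for this flush.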

#endif