/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCCPerFlushResources_DEFINED
#define GrCCPerFlushResources_DEFINED

#include "GrNonAtomicRef.h"
#include "ccpr/GrCCAtlas.h"
#include "ccpr/GrCCFiller.h"
#include "ccpr/GrCCPathProcessor.h"

class GrCCPathCacheEntry;
class GrOnFlushResourceProvider;

/**
 * This struct encapsulates the minimum and desired requirements for the GPU resources required by
 * CCPR in a given flush.
 */
struct GrCCPerFlushResourceSpecs {
    int fNumCachedPaths = 0;

    int fNumCopiedPaths = 0;
    GrCCFiller::PathStats fCopyPathStats;
    GrCCAtlas::Specs fCopyAtlasSpecs;

    int fNumRenderedPaths = 0;
    int fNumClipPaths = 0;
    GrCCFiller::PathStats fRenderedPathStats;
    GrCCAtlas::Specs fRenderedAtlasSpecs;

    bool isEmpty() const {
        return 0 == fNumCachedPaths + fNumCopiedPaths + fNumRenderedPaths + fNumClipPaths;
    }
    void convertCopiesToRenders();
};
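
// A minimal usage sketch, not part of this header: CCPR tallies one GrCCPerFlushResourceSpecs per
// flush and only allocates per-flush resources when the specs are non-empty. The 'onFlushRP' local
// and the accumulation step below are illustrative assumptions, not API defined in this file.
//
//     GrCCPerFlushResourceSpecs specs;
//     // ... each flushing GrCCPerOpListPaths adds its path counts, PathStats, and atlas specs ...
//     if (!specs.isEmpty()) {
//         sk_sp<GrCCPerFlushResources> resources(new GrCCPerFlushResources(onFlushRP, specs));
//     }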

/**
 * This class wraps all the GPU resources that CCPR builds at flush time. It is allocated in CCPR's
 * preFlush() method, and referenced by all the GrCCPerOpListPaths objects that are being flushed.
 * It is deleted in postFlush() once all the flushing GrCCPerOpListPaths objects are deleted.
 */
class GrCCPerFlushResources : public GrNonAtomicRef<GrCCPerFlushResources> {
public:
    GrCCPerFlushResources(GrOnFlushResourceProvider*, const GrCCPerFlushResourceSpecs&);

    bool isMapped() const { return SkToBool(fPathInstanceData); }

    // Copies a path out of the previous flush's stashed mainline coverage count atlas, and into
    // a cached, 8-bit, literal-coverage atlas. The actual source texture to copy from will be
    // provided at the time finalize() is called.
    GrCCAtlas* copyPathToCachedAtlas(const GrCCPathCacheEntry&, GrCCPathProcessor::DoEvenOddFill,
                                     SkIVector* newAtlasOffset);

    // These two methods render a path into a temporary coverage count atlas. See GrCCFiller for
    // a description of the arguments. The returned atlases are "const" to prevent the caller from
    // assigning a unique key.
    const GrCCAtlas* renderPathInAtlas(const SkIRect& clipIBounds, const SkMatrix&, const SkPath&,
                                       SkRect* devBounds, SkRect* devBounds45, SkIRect* devIBounds,
                                       SkIVector* devToAtlasOffset);
    const GrCCAtlas* renderDeviceSpacePathInAtlas(const SkIRect& clipIBounds, const SkPath& devPath,
                                                  const SkIRect& devPathIBounds,
                                                  SkIVector* devToAtlasOffset);
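
    // A hedged sketch of the render-then-record flow. Local names such as 'resources',
    // 'viewMatrix', and 'path' are illustrative assumptions, not part of this API; the sketch
    // simply checks the returned atlas pointer before using it.
    //
    //     SkRect devBounds, devBounds45;
    //     SkIRect devIBounds;
    //     SkIVector devToAtlasOffset;
    //     if (const GrCCAtlas* atlas = resources->renderPathInAtlas(
    //             clipIBounds, viewMatrix, path, &devBounds, &devBounds45, &devIBounds,
    //             &devToAtlasOffset)) {
    //         // Remember 'atlas' and 'devToAtlasOffset'; they position this path's instance.
    //     }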

    // Returns the index in instanceBuffer() of the next instance that will be added by
    // appendDrawPathInstance().
    int nextPathInstanceIdx() const { return fNextPathInstanceIdx; }

    // Appends an instance to instanceBuffer() that will draw a path to the destination render
    // target. The caller is responsible for calling set() on the returned instance, for keeping
    // track of its atlas and index (see nextPathInstanceIdx()), and for issuing the actual draw
    // call.
    GrCCPathProcessor::Instance& appendDrawPathInstance() {
        SkASSERT(this->isMapped());
        SkASSERT(fNextPathInstanceIdx < fEndPathInstance);
        return fPathInstanceData[fNextPathInstanceIdx++];
    }
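
    // A hedged bookkeeping example. Only the two methods above are real; the set() arguments are
    // deliberately elided (see GrCCPathProcessor::Instance for the actual signature), and
    // 'resources' is an illustrative local.
    //
    //     int baseInstance = resources->nextPathInstanceIdx();
    //     resources->appendDrawPathInstance().set(/* see GrCCPathProcessor::Instance */);
    //     int endInstance = resources->nextPathInstanceIdx();  // One past the appended instance.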

    // Finishes off the GPU buffers and renders the atlas(es). 'stashedAtlasProxy', if provided, is
    // the mainline coverage count atlas from the previous flush. It will be used as the source
    // texture for any copies set up by copyPathToCachedAtlas().
    bool finalize(GrOnFlushResourceProvider*, sk_sp<GrTextureProxy> stashedAtlasProxy,
                  SkTArray<sk_sp<GrRenderTargetContext>>* out);
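
    // A hypothetical end-of-setup sketch; 'onFlushRP', 'stashedAtlasProxy', and 'atlasDraws' are
    // illustrative locals, and the assumption here is that 'out' collects the render target
    // contexts that will draw the atlases.
    //
    //     SkTArray<sk_sp<GrRenderTargetContext>> atlasDraws;
    //     if (!resources->finalize(onFlushRP, std::move(stashedAtlasProxy), &atlasDraws)) {
    //         return;  // Skip CCPR drawing for this flush.
    //     }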

    // Accessors used by draw calls, once the resources have been finalized.
    const GrCCFiller& filler() const { SkASSERT(!this->isMapped()); return fFiller; }
    const GrBuffer* indexBuffer() const { SkASSERT(!this->isMapped()); return fIndexBuffer.get(); }
    const GrBuffer* vertexBuffer() const { SkASSERT(!this->isMapped()); return fVertexBuffer.get();}
    GrBuffer* instanceBuffer() const { SkASSERT(!this->isMapped()); return fInstanceBuffer.get(); }

    // Returns the mainline coverage count atlas that the client may stash for next flush, if any.
    // The caller is responsible for calling getOrAssignUniqueKey() on this atlas if they wish to
    // actually stash it in order to copy paths into cached atlases.
    GrCCAtlas* nextAtlasToStash() {
        return fRenderedAtlasStack.empty() ? nullptr : &fRenderedAtlasStack.front();
    }

    // Returns true if the client has called getOrAssignUniqueKey() on our nextAtlasToStash().
    bool hasStashedAtlas() const {
        return !fRenderedAtlasStack.empty() && fRenderedAtlasStack.front().uniqueKey().isValid();
    }
    const GrUniqueKey& stashedAtlasKey() const {
        SkASSERT(this->hasStashedAtlas());
        return fRenderedAtlasStack.front().uniqueKey();
    }
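
    // A hedged sketch of the cross-flush stash/copy workflow. Variable names are illustrative, and
    // the getOrAssignUniqueKey() argument shown is an assumption; the method itself is declared on
    // GrCCAtlas.
    //
    //     // Flush N: key the final coverage count atlas so the client can stash it.
    //     if (GrCCAtlas* atlas = resources->nextAtlasToStash()) {
    //         atlas->getOrAssignUniqueKey(onFlushRP);
    //     }
    //
    //     // Flush N+1: copy cached paths out of the stashed texture, then pass that texture as
    //     // the copy source when finalizing.
    //     SkIVector newAtlasOffset;
    //     nextFlushResources->copyPathToCachedAtlas(cacheEntry, doEvenOddFill, &newAtlasOffset);
    //     nextFlushResources->finalize(onFlushRP, stashedAtlasProxy, &atlasDraws);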

private:
    bool placeRenderedPathInAtlas(const SkIRect& clipIBounds, const SkIRect& pathIBounds,
                                  GrScissorTest*, SkIRect* clippedPathIBounds,
                                  SkIVector* devToAtlasOffset);

    const SkAutoSTArray<32, SkPoint> fLocalDevPtsBuffer;
    GrCCFiller fFiller;
    GrCCAtlasStack fCopyAtlasStack;
    GrCCAtlasStack fRenderedAtlasStack;

    const sk_sp<const GrBuffer> fIndexBuffer;
    const sk_sp<const GrBuffer> fVertexBuffer;
    const sk_sp<GrBuffer> fInstanceBuffer;

    GrCCPathProcessor::Instance* fPathInstanceData = nullptr;
    int fNextCopyInstanceIdx;
    SkDEBUGCODE(int fEndCopyInstance);
    int fNextPathInstanceIdx;
    SkDEBUGCODE(int fEndPathInstance);
};

#endif