Chris Dalton | 5ba36ba | 2018-05-09 01:08:38 -0600 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2018 Google Inc. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license that can be |
| 5 | * found in the LICENSE file. |
| 6 | */ |
| 7 | |
| 8 | #ifndef GrCCPerFlushResources_DEFINED |
| 9 | #define GrCCPerFlushResources_DEFINED |
| 10 | |
Chris Dalton | d7e2227 | 2018-05-23 10:17:17 -0600 | [diff] [blame] | 11 | #include "GrNonAtomicRef.h" |
Chris Dalton | 5ba36ba | 2018-05-09 01:08:38 -0600 | [diff] [blame] | 12 | #include "ccpr/GrCCAtlas.h" |
Chris Dalton | e163969 | 2018-08-20 14:00:30 -0600 | [diff] [blame] | 13 | #include "ccpr/GrCCFiller.h" |
Chris Dalton | 09a7bb2 | 2018-08-31 19:53:15 +0800 | [diff] [blame] | 14 | #include "ccpr/GrCCStroker.h" |
Chris Dalton | 5ba36ba | 2018-05-09 01:08:38 -0600 | [diff] [blame] | 15 | #include "ccpr/GrCCPathProcessor.h" |
| 16 | |
Chris Dalton | 4da7019 | 2018-06-18 09:51:36 -0600 | [diff] [blame] | 17 | class GrCCPathCacheEntry; |
| 18 | class GrOnFlushResourceProvider; |
Chris Dalton | 09a7bb2 | 2018-08-31 19:53:15 +0800 | [diff] [blame] | 19 | class GrShape; |
| 20 | |
| 21 | /** |
| 22 | * This struct counts values that help us preallocate buffers for rendered path geometry. |
| 23 | */ |
struct GrCCRenderedPathStats {
    // Largest SkPath::countPoints() seen among all paths passed to statPath(); used to size
    // per-path scratch buffers.
    int fMaxPointsPerPath = 0;
    // Running totals accumulated across every stat'd path.
    int fNumTotalSkPoints = 0;
    int fNumTotalSkVerbs = 0;
    int fNumTotalConicWeights = 0;

    // Folds one path's point/verb/conic counts into the totals above. (Defined inline at the
    // bottom of this header.)
    void statPath(const SkPath&);
};
Chris Dalton | 4da7019 | 2018-06-18 09:51:36 -0600 | [diff] [blame] | 32 | |
Chris Dalton | 5ba36ba | 2018-05-09 01:08:38 -0600 | [diff] [blame] | 33 | /** |
Chris Dalton | 42c2115 | 2018-06-13 15:28:19 -0600 | [diff] [blame] | 34 | * This struct encapsulates the minimum and desired requirements for the GPU resources required by |
| 35 | * CCPR in a given flush. |
| 36 | */ |
| 37 | struct GrCCPerFlushResourceSpecs { |
Chris Dalton | 09a7bb2 | 2018-08-31 19:53:15 +0800 | [diff] [blame] | 38 | static constexpr int kFillIdx = 0; |
| 39 | static constexpr int kStrokeIdx = 1; |
| 40 | |
Chris Dalton | 4da7019 | 2018-06-18 09:51:36 -0600 | [diff] [blame] | 41 | int fNumCachedPaths = 0; |
| 42 | |
Chris Dalton | 09a7bb2 | 2018-08-31 19:53:15 +0800 | [diff] [blame] | 43 | int fNumCopiedPaths[2] = {0, 0}; |
| 44 | GrCCRenderedPathStats fCopyPathStats[2]; |
Chris Dalton | 4da7019 | 2018-06-18 09:51:36 -0600 | [diff] [blame] | 45 | GrCCAtlas::Specs fCopyAtlasSpecs; |
| 46 | |
Chris Dalton | 09a7bb2 | 2018-08-31 19:53:15 +0800 | [diff] [blame] | 47 | int fNumRenderedPaths[2] = {0, 0}; |
Chris Dalton | 42c2115 | 2018-06-13 15:28:19 -0600 | [diff] [blame] | 48 | int fNumClipPaths = 0; |
Chris Dalton | 09a7bb2 | 2018-08-31 19:53:15 +0800 | [diff] [blame] | 49 | GrCCRenderedPathStats fRenderedPathStats[2]; |
Chris Dalton | 4da7019 | 2018-06-18 09:51:36 -0600 | [diff] [blame] | 50 | GrCCAtlas::Specs fRenderedAtlasSpecs; |
Chris Dalton | 42c2115 | 2018-06-13 15:28:19 -0600 | [diff] [blame] | 51 | |
Chris Dalton | 4da7019 | 2018-06-18 09:51:36 -0600 | [diff] [blame] | 52 | bool isEmpty() const { |
Chris Dalton | 09a7bb2 | 2018-08-31 19:53:15 +0800 | [diff] [blame] | 53 | return 0 == fNumCachedPaths + fNumCopiedPaths[kFillIdx] + fNumCopiedPaths[kStrokeIdx] + |
| 54 | fNumRenderedPaths[kFillIdx] + fNumRenderedPaths[kStrokeIdx] + fNumClipPaths; |
Chris Dalton | 4da7019 | 2018-06-18 09:51:36 -0600 | [diff] [blame] | 55 | } |
Chris Dalton | 2e825a3 | 2019-01-04 22:14:27 +0000 | [diff] [blame] | 56 | void convertCopiesToRenders(); |
Chris Dalton | 42c2115 | 2018-06-13 15:28:19 -0600 | [diff] [blame] | 57 | }; |
| 58 | |
| 59 | /** |
Chris Dalton | d7e2227 | 2018-05-23 10:17:17 -0600 | [diff] [blame] | 60 | * This class wraps all the GPU resources that CCPR builds at flush time. It is allocated in CCPR's |
| 61 | * preFlush() method, and referenced by all the GrCCPerOpListPaths objects that are being flushed. |
| 62 | * It is deleted in postFlush() once all the flushing GrCCPerOpListPaths objects are deleted. |
Chris Dalton | 5ba36ba | 2018-05-09 01:08:38 -0600 | [diff] [blame] | 63 | */ |
Chris Dalton | d7e2227 | 2018-05-23 10:17:17 -0600 | [diff] [blame] | 64 | class GrCCPerFlushResources : public GrNonAtomicRef<GrCCPerFlushResources> { |
Chris Dalton | 5ba36ba | 2018-05-09 01:08:38 -0600 | [diff] [blame] | 65 | public: |
Chris Dalton | 42c2115 | 2018-06-13 15:28:19 -0600 | [diff] [blame] | 66 | GrCCPerFlushResources(GrOnFlushResourceProvider*, const GrCCPerFlushResourceSpecs&); |
Chris Dalton | 5ba36ba | 2018-05-09 01:08:38 -0600 | [diff] [blame] | 67 | |
| 68 | bool isMapped() const { return SkToBool(fPathInstanceData); } |
| 69 | |
Chris Dalton | 2e825a3 | 2019-01-04 22:14:27 +0000 | [diff] [blame] | 70 | // Copies a path out of the the previous flush's stashed mainline coverage count atlas, and into |
| 71 | // a cached, 8-bit, literal-coverage atlas. The actual source texture to copy from will be |
| 72 | // provided at the time finalize() is called. |
| 73 | GrCCAtlas* copyPathToCachedAtlas(const GrCCPathCacheEntry&, GrCCPathProcessor::DoEvenOddFill, |
| 74 | SkIVector* newAtlasOffset); |
Chris Dalton | 4da7019 | 2018-06-18 09:51:36 -0600 | [diff] [blame] | 75 | |
Chris Dalton | 09a7bb2 | 2018-08-31 19:53:15 +0800 | [diff] [blame] | 76 | // These two methods render a path into a temporary coverage count atlas. See |
Chris Dalton | 2e825a3 | 2019-01-04 22:14:27 +0000 | [diff] [blame] | 77 | // GrCCPathProcessor::Instance for a description of the outputs. The returned atlases are |
| 78 | // "const" to prevent the caller from assigning a unique key. |
Chris Dalton | 09a7bb2 | 2018-08-31 19:53:15 +0800 | [diff] [blame] | 79 | // |
| 80 | // strokeDevWidth must be 0 for fills, 1 for hairlines, or the stroke width in device-space |
| 81 | // pixels for non-hairline strokes (implicitly requiring a rigid-body transform). |
Chris Dalton | 2e825a3 | 2019-01-04 22:14:27 +0000 | [diff] [blame] | 82 | const GrCCAtlas* renderShapeInAtlas(const SkIRect& clipIBounds, const SkMatrix&, const GrShape&, |
| 83 | float strokeDevWidth, SkRect* devBounds, |
| 84 | SkRect* devBounds45, SkIRect* devIBounds, |
| 85 | SkIVector* devToAtlasOffset); |
Chris Dalton | 9414c96 | 2018-06-14 10:14:50 -0600 | [diff] [blame] | 86 | const GrCCAtlas* renderDeviceSpacePathInAtlas(const SkIRect& clipIBounds, const SkPath& devPath, |
| 87 | const SkIRect& devPathIBounds, |
| 88 | SkIVector* devToAtlasOffset); |
Chris Dalton | 5ba36ba | 2018-05-09 01:08:38 -0600 | [diff] [blame] | 89 | |
Chris Dalton | 9414c96 | 2018-06-14 10:14:50 -0600 | [diff] [blame] | 90 | // Returns the index in instanceBuffer() of the next instance that will be added by |
| 91 | // appendDrawPathInstance(). |
Chris Dalton | daef06a | 2018-05-23 17:11:09 -0600 | [diff] [blame] | 92 | int nextPathInstanceIdx() const { return fNextPathInstanceIdx; } |
Chris Dalton | 5ba36ba | 2018-05-09 01:08:38 -0600 | [diff] [blame] | 93 | |
Chris Dalton | 9414c96 | 2018-06-14 10:14:50 -0600 | [diff] [blame] | 94 | // Appends an instance to instanceBuffer() that will draw a path to the destination render |
| 95 | // target. The caller is responsible to call set() on the returned instance, to keep track of |
| 96 | // its atlas and index (see nextPathInstanceIdx()), and to issue the actual draw call. |
| 97 | GrCCPathProcessor::Instance& appendDrawPathInstance() { |
| 98 | SkASSERT(this->isMapped()); |
| 99 | SkASSERT(fNextPathInstanceIdx < fEndPathInstance); |
| 100 | return fPathInstanceData[fNextPathInstanceIdx++]; |
| 101 | } |
Chris Dalton | 5ba36ba | 2018-05-09 01:08:38 -0600 | [diff] [blame] | 102 | |
Chris Dalton | 2e825a3 | 2019-01-04 22:14:27 +0000 | [diff] [blame] | 103 | // Finishes off the GPU buffers and renders the atlas(es). 'stashedAtlasProxy', if provided, is |
| 104 | // the mainline coverage count atlas from the previous flush. It will be used as the source |
| 105 | // texture for any copies setup by copyStashedPathToAtlas(). |
| 106 | bool finalize(GrOnFlushResourceProvider*, sk_sp<GrTextureProxy> stashedAtlasProxy, |
| 107 | SkTArray<sk_sp<GrRenderTargetContext>>* out); |
Chris Dalton | 9414c96 | 2018-06-14 10:14:50 -0600 | [diff] [blame] | 108 | |
| 109 | // Accessors used by draw calls, once the resources have been finalized. |
Chris Dalton | e163969 | 2018-08-20 14:00:30 -0600 | [diff] [blame] | 110 | const GrCCFiller& filler() const { SkASSERT(!this->isMapped()); return fFiller; } |
Chris Dalton | 09a7bb2 | 2018-08-31 19:53:15 +0800 | [diff] [blame] | 111 | const GrCCStroker& stroker() const { SkASSERT(!this->isMapped()); return fStroker; } |
Chris Dalton | 5ba36ba | 2018-05-09 01:08:38 -0600 | [diff] [blame] | 112 | const GrBuffer* indexBuffer() const { SkASSERT(!this->isMapped()); return fIndexBuffer.get(); } |
| 113 | const GrBuffer* vertexBuffer() const { SkASSERT(!this->isMapped()); return fVertexBuffer.get();} |
| 114 | GrBuffer* instanceBuffer() const { SkASSERT(!this->isMapped()); return fInstanceBuffer.get(); } |
| 115 | |
Chris Dalton | 2e825a3 | 2019-01-04 22:14:27 +0000 | [diff] [blame] | 116 | // Returns the mainline coverage count atlas that the client may stash for next flush, if any. |
| 117 | // The caller is responsible to call getOrAssignUniqueKey() on this atlas if they wish to |
| 118 | // actually stash it in order to copy paths into cached atlases. |
| 119 | GrCCAtlas* nextAtlasToStash() { |
| 120 | return fRenderedAtlasStack.empty() ? nullptr : &fRenderedAtlasStack.front(); |
| 121 | } |
| 122 | |
| 123 | // Returs true if the client has called getOrAssignUniqueKey() on our nextAtlasToStash(). |
| 124 | bool hasStashedAtlas() const { |
| 125 | return !fRenderedAtlasStack.empty() && fRenderedAtlasStack.front().uniqueKey().isValid(); |
| 126 | } |
| 127 | const GrUniqueKey& stashedAtlasKey() const { |
| 128 | SkASSERT(this->hasStashedAtlas()); |
| 129 | return fRenderedAtlasStack.front().uniqueKey(); |
| 130 | } |
| 131 | |
Chris Dalton | 5ba36ba | 2018-05-09 01:08:38 -0600 | [diff] [blame] | 132 | private: |
Chris Dalton | e163969 | 2018-08-20 14:00:30 -0600 | [diff] [blame] | 133 | bool placeRenderedPathInAtlas(const SkIRect& clipIBounds, const SkIRect& pathIBounds, |
| 134 | GrScissorTest*, SkIRect* clippedPathIBounds, |
| 135 | SkIVector* devToAtlasOffset); |
Chris Dalton | 5ba36ba | 2018-05-09 01:08:38 -0600 | [diff] [blame] | 136 | |
Chris Dalton | e163969 | 2018-08-20 14:00:30 -0600 | [diff] [blame] | 137 | const SkAutoSTArray<32, SkPoint> fLocalDevPtsBuffer; |
| 138 | GrCCFiller fFiller; |
Chris Dalton | 09a7bb2 | 2018-08-31 19:53:15 +0800 | [diff] [blame] | 139 | GrCCStroker fStroker; |
Chris Dalton | 4da7019 | 2018-06-18 09:51:36 -0600 | [diff] [blame] | 140 | GrCCAtlasStack fCopyAtlasStack; |
| 141 | GrCCAtlasStack fRenderedAtlasStack; |
Chris Dalton | 5ba36ba | 2018-05-09 01:08:38 -0600 | [diff] [blame] | 142 | |
Chris Dalton | 9414c96 | 2018-06-14 10:14:50 -0600 | [diff] [blame] | 143 | const sk_sp<const GrBuffer> fIndexBuffer; |
| 144 | const sk_sp<const GrBuffer> fVertexBuffer; |
| 145 | const sk_sp<GrBuffer> fInstanceBuffer; |
Chris Dalton | 5ba36ba | 2018-05-09 01:08:38 -0600 | [diff] [blame] | 146 | |
| 147 | GrCCPathProcessor::Instance* fPathInstanceData = nullptr; |
Chris Dalton | 4da7019 | 2018-06-18 09:51:36 -0600 | [diff] [blame] | 148 | int fNextCopyInstanceIdx; |
| 149 | SkDEBUGCODE(int fEndCopyInstance); |
| 150 | int fNextPathInstanceIdx; |
Chris Dalton | 9414c96 | 2018-06-14 10:14:50 -0600 | [diff] [blame] | 151 | SkDEBUGCODE(int fEndPathInstance); |
Chris Dalton | 5ba36ba | 2018-05-09 01:08:38 -0600 | [diff] [blame] | 152 | }; |
| 153 | |
Chris Dalton | 09a7bb2 | 2018-08-31 19:53:15 +0800 | [diff] [blame] | 154 | inline void GrCCRenderedPathStats::statPath(const SkPath& path) { |
| 155 | fMaxPointsPerPath = SkTMax(fMaxPointsPerPath, path.countPoints()); |
| 156 | fNumTotalSkPoints += path.countPoints(); |
| 157 | fNumTotalSkVerbs += path.countVerbs(); |
| 158 | fNumTotalConicWeights += SkPathPriv::ConicWeightCnt(path); |
| 159 | } |
| 160 | |
Chris Dalton | 5ba36ba | 2018-05-09 01:08:38 -0600 | [diff] [blame] | 161 | #endif |