Chris Dalton | 5ba36ba | 2018-05-09 01:08:38 -0600 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2018 Google Inc. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license that can be |
| 5 | * found in the LICENSE file. |
| 6 | */ |
| 7 | |
| 8 | #ifndef GrCCPerFlushResources_DEFINED |
| 9 | #define GrCCPerFlushResources_DEFINED |
| 10 | |
| 11 | #include "GrAllocator.h" |
Chris Dalton | d7e2227 | 2018-05-23 10:17:17 -0600 | [diff] [blame] | 12 | #include "GrNonAtomicRef.h" |
Chris Dalton | 5ba36ba | 2018-05-09 01:08:38 -0600 | [diff] [blame] | 13 | #include "ccpr/GrCCAtlas.h" |
| 14 | #include "ccpr/GrCCPathParser.h" |
| 15 | #include "ccpr/GrCCPathProcessor.h" |
| 16 | |
/**
 * This struct encapsulates the minimum and desired requirements for the GPU resources required by
 * CCPR in a given flush.
 */
struct GrCCPerFlushResourceSpecs {
    int fNumRenderedPaths = 0;  // Count of paths drawn normally during this flush.
    int fNumClipPaths = 0;      // Count of paths rendered for clipping purposes.
    GrCCPathParser::PathStats fParsingPathStats;  // Aggregate stats the path parser will need.
    GrCCAtlas::Specs fAtlasSpecs;                 // Size/limit requirements for the atlas(es).

    // True when no paths of either kind have been counted for this flush (counts are
    // non-negative, so a zero sum means both are zero).
    bool isEmpty() const { return 0 == fNumRenderedPaths + fNumClipPaths; }
};
| 29 | |
/**
 * This class wraps all the GPU resources that CCPR builds at flush time. It is allocated in CCPR's
 * preFlush() method, and referenced by all the GrCCPerOpListPaths objects that are being flushed.
 * It is deleted in postFlush() once all the flushing GrCCPerOpListPaths objects are deleted.
 */
class GrCCPerFlushResources : public GrNonAtomicRef<GrCCPerFlushResources> {
public:
    GrCCPerFlushResources(GrOnFlushResourceProvider*, const GrCCPerFlushResourceSpecs&);

    // True while the path-instance buffer is mapped for CPU writes (fPathInstanceData != null),
    // i.e. between successful construction and finalize().
    bool isMapped() const { return SkToBool(fPathInstanceData); }

    // Renders the given path (transformed by the matrix) into an atlas. Writes the path's device
    // bounds, its bounds in the +45-degree-rotated space, and the assigned atlas offset to the out
    // params, and returns the atlas used.
    // NOTE(review): presumably returns null when the path can't be placed (e.g. clipped away or
    // atlas full) — confirm against the .cpp.
    GrCCAtlas* renderPathInAtlas(const SkIRect& clipIBounds, const SkMatrix&, const SkPath&,
                                 SkRect* devBounds, SkRect* devBounds45, int16_t* offsetX,
                                 int16_t* offsetY);

    // Same as above, but for a path that is already in device space (no matrix applied).
    GrCCAtlas* renderDeviceSpacePathInAtlas(const SkIRect& clipIBounds, const SkPath& devPath,
                                            const SkIRect& devPathIBounds, int16_t* atlasOffsetX,
                                            int16_t* atlasOffsetY);

    // Reserves and returns the next slot in the mapped instance buffer for the caller to fill in.
    // Only valid while isMapped(); the debug-only capacity assert guards against overflow.
    GrCCPathProcessor::Instance& appendDrawPathInstance() {
        SkASSERT(this->isMapped());
        SkASSERT(fNextPathInstanceIdx < fPathInstanceBufferCount);
        return fPathInstanceData[fNextPathInstanceIdx++];
    }

    // Index that the next call to appendDrawPathInstance() will occupy.
    int nextPathInstanceIdx() const { return fNextPathInstanceIdx; }

    // Finishes building the resources (after which the buffers are no longer mapped) and hands
    // back the atlas render-target contexts via 'atlasDraws'.
    // NOTE(review): exact failure semantics of the bool return live in the .cpp — confirm there.
    bool finalize(GrOnFlushResourceProvider*, SkTArray<sk_sp<GrRenderTargetContext>>* atlasDraws);

    // Buffer accessors; only valid once the instance buffer is unmapped (asserted).
    const GrBuffer* indexBuffer() const { SkASSERT(!this->isMapped()); return fIndexBuffer.get(); }
    const GrBuffer* vertexBuffer() const { SkASSERT(!this->isMapped()); return fVertexBuffer.get();}
    GrBuffer* instanceBuffer() const { SkASSERT(!this->isMapped()); return fInstanceBuffer.get(); }

private:
    // Shared placement step for both render methods: finds room in an atlas for the parsed path's
    // bounds and reports the offset it was assigned.
    GrCCAtlas* placeParsedPathInAtlas(const SkIRect& clipIBounds, const SkIRect& pathIBounds,
                                      int16_t* atlasOffsetX, int16_t* atlasOffsetY);

    const sk_sp<GrCCPathParser> fPathParser;  // Shared path parser for this flush.
    const GrCCAtlas::Specs fAtlasSpecs;       // Atlas requirements captured from the flush specs.

    sk_sp<const GrBuffer> fIndexBuffer;
    sk_sp<const GrBuffer> fVertexBuffer;
    sk_sp<GrBuffer> fInstanceBuffer;

    // CPU-side mapping of fInstanceBuffer; non-null only while mapped (see isMapped()).
    GrCCPathProcessor::Instance* fPathInstanceData = nullptr;
    int fNextPathInstanceIdx = 0;
    SkDEBUGCODE(int fPathInstanceBufferCount);  // Debug-only capacity, for the overflow assert.

    // Atlases created during this flush; inline storage for the common case of up to 4.
    GrSTAllocator<4, GrCCAtlas> fAtlases;
};
| 78 | |
| 79 | #endif |