blob: bc6b6edff3e15d28a7d9b7fcb6470083b42adc22 [file] [log] [blame]
/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8#ifndef GrCCPerFlushResources_DEFINED
9#define GrCCPerFlushResources_DEFINED
10
11#include "GrAllocator.h"
Chris Daltond7e22272018-05-23 10:17:17 -060012#include "GrNonAtomicRef.h"
Chris Dalton5ba36ba2018-05-09 01:08:38 -060013#include "ccpr/GrCCAtlas.h"
14#include "ccpr/GrCCPathParser.h"
15#include "ccpr/GrCCPathProcessor.h"
16
17/**
Chris Daltond7e22272018-05-23 10:17:17 -060018 * This class wraps all the GPU resources that CCPR builds at flush time. It is allocated in CCPR's
19 * preFlush() method, and referenced by all the GrCCPerOpListPaths objects that are being flushed.
20 * It is deleted in postFlush() once all the flushing GrCCPerOpListPaths objects are deleted.
Chris Dalton5ba36ba2018-05-09 01:08:38 -060021 */
class GrCCPerFlushResources : public GrNonAtomicRef<GrCCPerFlushResources> {
public:
    // Allocates the per-flush buffers and atlases sized for the given path/clip counts and
    // aggregate path statistics.
    GrCCPerFlushResources(GrOnFlushResourceProvider*, int numPathDraws, int numClipPaths,
                          const GrCCPathParser::PathStats&);

    // True while the instance buffer is mapped for writing (i.e. fPathInstanceData is non-null).
    // While mapped, instances may be appended; the GPU buffers may not yet be handed out.
    bool isMapped() const { return SkToBool(fPathInstanceData); }

    // Renders a path, transformed by the given matrix, into an atlas. On success, returns the
    // atlas it was placed in and writes the path's device-space bounds plus its (x,y) offset
    // within that atlas to the out-params. (devBounds45 presumably holds bounds in the
    // 45-degree-rotated space CCPR uses — confirm against the implementation.)
    GrCCAtlas* renderPathInAtlas(const GrCaps&, const SkIRect& clipIBounds, const SkMatrix&,
                                 const SkPath&, SkRect* devBounds, SkRect* devBounds45,
                                 int16_t* offsetX, int16_t* offsetY);
    // Same as above, but for a path already in device space (no matrix); used for clip paths.
    GrCCAtlas* renderDeviceSpacePathInAtlas(const GrCaps&, const SkIRect& clipIBounds,
                                            const SkPath& devPath, const SkIRect& devPathIBounds,
                                            int16_t* atlasOffsetX, int16_t* atlasOffsetY);

    // Reserves and returns the next free slot in the mapped instance buffer for the caller to
    // fill in. Only legal while mapped; the bounds check is debug-only (fPathInstanceBufferCount
    // exists only under SkDEBUGCODE).
    GrCCPathProcessor::Instance& appendDrawPathInstance() {
        SkASSERT(this->isMapped());
        SkASSERT(fNextPathInstanceIdx < fPathInstanceBufferCount);
        return fPathInstanceData[fNextPathInstanceIdx++];
    }
    // Index that the next appendDrawPathInstance() call will occupy.
    int nextPathInstanceIdx() const { return fNextPathInstanceIdx; }

    // Finishes building: flushes rendering work and appends the atlas draws that the caller must
    // execute. Presumably also unmaps the instance buffer (the accessors below require
    // !isMapped()) — confirm against the implementation.
    bool finalize(GrOnFlushResourceProvider*, SkTArray<sk_sp<GrRenderTargetContext>>* atlasDraws);

    // GPU buffer accessors for drawing. Only valid after finalize(), i.e. once unmapped.
    const GrBuffer* indexBuffer() const { SkASSERT(!this->isMapped()); return fIndexBuffer.get(); }
    const GrBuffer* vertexBuffer() const { SkASSERT(!this->isMapped()); return fVertexBuffer.get();}
    GrBuffer* instanceBuffer() const { SkASSERT(!this->isMapped()); return fInstanceBuffer.get(); }

private:
    // Shared implementation for the two render*InAtlas() methods: finds (or opens) an atlas with
    // room for pathIBounds and reports the placement offset.
    GrCCAtlas* placeParsedPathInAtlas(const GrCaps&, const SkIRect& clipIBounds,
                                      const SkIRect& pathIBounds, int16_t* atlasOffsetX,
                                      int16_t* atlasOffsetY);

    const sk_sp<GrCCPathParser> fPathParser;

    // Geometry shared by all path draws; instance data is written per flush.
    sk_sp<const GrBuffer> fIndexBuffer;
    sk_sp<const GrBuffer> fVertexBuffer;
    sk_sp<GrBuffer> fInstanceBuffer;

    // Non-null only while fInstanceBuffer is mapped for CPU writes.
    GrCCPathProcessor::Instance* fPathInstanceData = nullptr;
    int fNextPathInstanceIdx = 0;
    SkDEBUGCODE(int fPathInstanceBufferCount);  // Debug-only capacity for overflow asserts.

    // Inline storage for the common case of at most 4 atlases per flush.
    GrSTAllocator<4, GrCCAtlas> fAtlases;
};
66
67#endif