blob: e920c06acc2ebe067f53b70c18943207179a73d7 [file] [log] [blame]
/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8#ifndef GrCCPerFlushResources_DEFINED
9#define GrCCPerFlushResources_DEFINED
10
Chris Daltond7e22272018-05-23 10:17:17 -060011#include "GrNonAtomicRef.h"
Chris Dalton5ba36ba2018-05-09 01:08:38 -060012#include "ccpr/GrCCAtlas.h"
13#include "ccpr/GrCCPathParser.h"
14#include "ccpr/GrCCPathProcessor.h"
15
16/**
Chris Dalton42c21152018-06-13 15:28:19 -060017 * This struct encapsulates the minimum and desired requirements for the GPU resources required by
18 * CCPR in a given flush.
19 */
20struct GrCCPerFlushResourceSpecs {
21 int fNumRenderedPaths = 0;
22 int fNumClipPaths = 0;
23 GrCCPathParser::PathStats fParsingPathStats;
24 GrCCAtlas::Specs fAtlasSpecs;
25
26 bool isEmpty() const { return 0 == fNumRenderedPaths + fNumClipPaths; }
27};
28
29/**
Chris Daltond7e22272018-05-23 10:17:17 -060030 * This class wraps all the GPU resources that CCPR builds at flush time. It is allocated in CCPR's
31 * preFlush() method, and referenced by all the GrCCPerOpListPaths objects that are being flushed.
32 * It is deleted in postFlush() once all the flushing GrCCPerOpListPaths objects are deleted.
Chris Dalton5ba36ba2018-05-09 01:08:38 -060033 */
Chris Daltond7e22272018-05-23 10:17:17 -060034class GrCCPerFlushResources : public GrNonAtomicRef<GrCCPerFlushResources> {
Chris Dalton5ba36ba2018-05-09 01:08:38 -060035public:
Chris Dalton42c21152018-06-13 15:28:19 -060036 GrCCPerFlushResources(GrOnFlushResourceProvider*, const GrCCPerFlushResourceSpecs&);
Chris Dalton5ba36ba2018-05-09 01:08:38 -060037
38 bool isMapped() const { return SkToBool(fPathInstanceData); }
39
Chris Dalton9414c962018-06-14 10:14:50 -060040 // Renders a path into a temporary atlas. See GrCCPathParser for a description of the arguments.
41 const GrCCAtlas* renderPathInAtlas(const SkIRect& clipIBounds, const SkMatrix&, const SkPath&,
42 SkRect* devBounds, SkRect* devBounds45,
43 SkIVector* devToAtlasOffset);
44 const GrCCAtlas* renderDeviceSpacePathInAtlas(const SkIRect& clipIBounds, const SkPath& devPath,
45 const SkIRect& devPathIBounds,
46 SkIVector* devToAtlasOffset);
Chris Dalton5ba36ba2018-05-09 01:08:38 -060047
Chris Dalton9414c962018-06-14 10:14:50 -060048 // Returns the index in instanceBuffer() of the next instance that will be added by
49 // appendDrawPathInstance().
Chris Daltondaef06a2018-05-23 17:11:09 -060050 int nextPathInstanceIdx() const { return fNextPathInstanceIdx; }
Chris Dalton5ba36ba2018-05-09 01:08:38 -060051
Chris Dalton9414c962018-06-14 10:14:50 -060052 // Appends an instance to instanceBuffer() that will draw a path to the destination render
53 // target. The caller is responsible to call set() on the returned instance, to keep track of
54 // its atlas and index (see nextPathInstanceIdx()), and to issue the actual draw call.
55 GrCCPathProcessor::Instance& appendDrawPathInstance() {
56 SkASSERT(this->isMapped());
57 SkASSERT(fNextPathInstanceIdx < fEndPathInstance);
58 return fPathInstanceData[fNextPathInstanceIdx++];
59 }
Chris Dalton5ba36ba2018-05-09 01:08:38 -060060
Chris Dalton9414c962018-06-14 10:14:50 -060061 // Finishes off the GPU buffers and renders the atlas(es).
62 bool finalize(GrOnFlushResourceProvider*, SkTArray<sk_sp<GrRenderTargetContext>>* out);
63
64 // Accessors used by draw calls, once the resources have been finalized.
65 const GrCCPathParser& pathParser() const { SkASSERT(!this->isMapped()); return fPathParser; }
Chris Dalton5ba36ba2018-05-09 01:08:38 -060066 const GrBuffer* indexBuffer() const { SkASSERT(!this->isMapped()); return fIndexBuffer.get(); }
67 const GrBuffer* vertexBuffer() const { SkASSERT(!this->isMapped()); return fVertexBuffer.get();}
68 GrBuffer* instanceBuffer() const { SkASSERT(!this->isMapped()); return fInstanceBuffer.get(); }
69
70private:
Chris Dalton9414c962018-06-14 10:14:50 -060071 bool placeParsedPathInAtlas(const SkIRect& clipIBounds, const SkIRect& pathIBounds,
72 SkIVector* devToAtlasOffset);
Chris Dalton5ba36ba2018-05-09 01:08:38 -060073
Chris Dalton9414c962018-06-14 10:14:50 -060074 GrCCPathParser fPathParser;
75 GrCCAtlasStack fAtlasStack;
Chris Dalton5ba36ba2018-05-09 01:08:38 -060076
Chris Dalton9414c962018-06-14 10:14:50 -060077 const sk_sp<const GrBuffer> fIndexBuffer;
78 const sk_sp<const GrBuffer> fVertexBuffer;
79 const sk_sp<GrBuffer> fInstanceBuffer;
Chris Dalton5ba36ba2018-05-09 01:08:38 -060080
81 GrCCPathProcessor::Instance* fPathInstanceData = nullptr;
Chris Daltondaef06a2018-05-23 17:11:09 -060082 int fNextPathInstanceIdx = 0;
Chris Dalton9414c962018-06-14 10:14:50 -060083 SkDEBUGCODE(int fEndPathInstance);
Chris Dalton5ba36ba2018-05-09 01:08:38 -060084};
85
86#endif