/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrDrawingManager_DEFINED
#define GrDrawingManager_DEFINED

#include "GrBufferAllocPool.h"
#include "GrDeferredUpload.h"
#include "GrPathRenderer.h"
#include "GrPathRendererChain.h"
#include "GrResourceCache.h"
#include "SkSurface.h"
#include "SkTArray.h"
#include "text/GrTextContext.h"

class GrCoverageCountingPathRenderer;
class GrOnFlushCallbackObject;
class GrOpFlushState;
class GrRecordingContext;
class GrRenderTargetContext;
class GrRenderTargetProxy;
class GrRenderTargetOpList;
class GrSoftwarePathRenderer;
class GrTextureContext;
class GrTextureOpList;
class SkDeferredDisplayList;

// The GrDrawingManager allocates a new GrRenderTargetContext for each GrRenderTarget
// but all of them still land in the same GrOpList!
//
// In the future this class will allocate a new GrRenderTargetContext for
// each GrRenderTarget/GrOpList and manage the DAG.
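//
// A minimal usage sketch, for orientation only (not part of the API; 'drawingMgr',
// 'proxy', 'colorSpace' and 'props' are hypothetical locals and error handling is
// omitted):
//
//     // drawingMgr is the GrRecordingContext's drawing manager.
//     sk_sp<GrRenderTargetContext> rtc = drawingMgr->makeRenderTargetContext(
//             std::move(proxy), std::move(colorSpace), &props, /*managedOpList=*/true);
//     // Draws recorded through 'rtc' accumulate in its GrRenderTargetOpList until the
//     // drawing manager flushes the DAG.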
class GrDrawingManager {
public:
    ~GrDrawingManager();

    void freeGpuResources();

    sk_sp<GrRenderTargetContext> makeRenderTargetContext(sk_sp<GrSurfaceProxy>,
                                                         sk_sp<SkColorSpace>,
                                                         const SkSurfaceProps*,
                                                         bool managedOpList = true);
    sk_sp<GrTextureContext> makeTextureContext(sk_sp<GrSurfaceProxy>, sk_sp<SkColorSpace>);

    // The caller automatically gets a ref on the returned opList. It must
    // be balanced by an unref call.
    // A managed opList is controlled by the drawing manager (i.e., sorted & flushed with the
    // others). An unmanaged one is created and used by the onFlush callback (see the
    // illustrative sketch below these declarations).
    sk_sp<GrRenderTargetOpList> newRTOpList(GrRenderTargetProxy* rtp, bool managedOpList);
    sk_sp<GrTextureOpList> newTextureOpList(GrTextureProxy* textureProxy);
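
    // For illustration only (hypothetical call sites; 'rtProxy' and 'atlasProxy' are
    // assumed locals): a managed opList joins the DAG and is flushed with the rest,
    //
    //     sk_sp<GrRenderTargetOpList> opList =
    //             this->newRTOpList(rtProxy, /*managedOpList=*/true);
    //
    // while an onFlush callback requests an unmanaged opList that it drives itself:
    //
    //     sk_sp<GrRenderTargetOpList> atlasOpList =
    //             this->newRTOpList(atlasProxy, /*managedOpList=*/false);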

    GrRecordingContext* getContext() { return fContext; }

    GrTextContext* getTextContext();

    GrPathRenderer* getPathRenderer(const GrPathRenderer::CanDrawPathArgs& args,
                                    bool allowSW,
                                    GrPathRendererChain::DrawType drawType,
                                    GrPathRenderer::StencilSupport* stencilSupport = nullptr);

    GrPathRenderer* getSoftwarePathRenderer();

    // Returns a direct pointer to the coverage counting path renderer, or null if it is not
    // supported or not enabled.
    GrCoverageCountingPathRenderer* getCoverageCountingPathRenderer();
70
Brian Salomon653f42f2018-07-10 10:07:31 -040071 void flushIfNecessary();
bsalomonb77a9072016-09-07 10:02:04 -070072
Greg Daniel78325c12017-06-19 16:39:13 -040073 static bool ProgramUnitTest(GrContext* context, int maxStages, int maxLevels);
robertphillipsa13e2022015-11-11 12:01:09 -080074
Greg Daniel51316782017-08-02 15:10:09 +000075 GrSemaphoresSubmitted prepareSurfaceForExternalIO(GrSurfaceProxy*,
Greg Danielbae71212019-03-01 15:24:35 -050076 SkSurface::BackendSurfaceAccess access,
Greg Danielb9990e42019-04-10 16:28:52 -040077 GrFlushFlags flags,
Greg Daniel51316782017-08-02 15:10:09 +000078 int numSemaphores,
79 GrBackendSemaphore backendSemaphores[]);
bsalomon6a2b1942016-09-08 11:28:59 -070080
Chris Daltonfe199b72017-05-05 11:26:15 -040081 void addOnFlushCallbackObject(GrOnFlushCallbackObject*);
Robert Phillipsdbaf3172019-02-06 15:12:53 -050082
83#if GR_TEST_UTILS
Chris Daltonfe199b72017-05-05 11:26:15 -040084 void testingOnly_removeOnFlushCallbackObject(GrOnFlushCallbackObject*);
Robert Phillipsdbaf3172019-02-06 15:12:53 -050085#endif
Robert Phillipseb35f4d2017-03-21 07:56:47 -040086
Robert Phillips62000362018-02-01 09:10:04 -050087 void moveOpListsToDDL(SkDeferredDisplayList* ddl);
88 void copyOpListsFromDDL(const SkDeferredDisplayList*, GrRenderTargetProxy* newDest);
89
robertphillips77a2e522015-10-17 07:43:27 -070090private:
    // This class encapsulates maintenance and manipulation of the drawing manager's DAG of
    // opLists.
    class OpListDAG {
    public:
        OpListDAG(bool explicitlyAllocating, bool sortOpLists);
        ~OpListDAG();

        // Currently, when explicitly allocating resources, this call will topologically sort the
        // opLists.
        // MDB TODO: remove once incremental opList sorting is enabled
        void prepForFlush();

        void closeAll(const GrCaps* caps);

        // A yucky combination of closeAll and reset
        void cleanup(const GrCaps* caps);

        void gatherIDs(SkSTArray<8, uint32_t, true>* idArray) const;

        void reset();

        // These calls forcibly remove an opList from the DAG. They are problematic because they
        // just remove the opList but don't clean up any referring pointers (i.e., dependency
        // pointers in the DAG). They work right now only because they are called at flush time,
        // after the topological sort is complete (so the dangling pointers aren't used).
        void removeOpList(int index);
        void removeOpLists(int startIndex, int stopIndex);

        bool empty() const { return fOpLists.empty(); }
        int numOpLists() const { return fOpLists.count(); }

        bool isUsed(GrSurfaceProxy*) const;

        GrOpList* opList(int index) { return fOpLists[index].get(); }
        const GrOpList* opList(int index) const { return fOpLists[index].get(); }

        GrOpList* back() { return fOpLists.back().get(); }
        const GrOpList* back() const { return fOpLists.back().get(); }

        void add(sk_sp<GrOpList>);
        void add(const SkTArray<sk_sp<GrOpList>>&);

        void swap(SkTArray<sk_sp<GrOpList>>* opLists);

        bool sortingOpLists() const { return fSortOpLists; }

    private:
        SkTArray<sk_sp<GrOpList>> fOpLists;
        bool fSortOpLists;
    };
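
    // A simplified sketch of how the drawing manager drives the DAG at flush time, for
    // orientation only ('caps', 'flushState' and 'numOpListsExecuted' are hypothetical
    // locals; the real flush also runs the onFlush callbacks and resource allocation):
    //
    //     fDAG.closeAll(caps);                  // no further ops may be recorded
    //     fDAG.prepForFlush();                  // topological sort, when explicitly allocating
    //     fDAG.gatherIDs(&fFlushingOpListIDs);  // note which opLists this flush covers
    //     this->executeOpLists(0, fDAG.numOpLists(), flushState, &numOpListsExecuted);
    //     fDAG.reset();                         // drop the flushed opLists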

    GrDrawingManager(GrRecordingContext*, const GrPathRendererChain::Options&,
                     const GrTextContext::Options&,
                     bool explicitlyAllocating,
                     bool sortOpLists,
                     GrContextOptions::Enable reduceOpListSplitting);

    bool wasAbandoned() const;

    void cleanup();

    // return true if any opLists were actually executed; false otherwise
    bool executeOpLists(int startIndex, int stopIndex, GrOpFlushState*, int* numOpListsExecuted);

    GrSemaphoresSubmitted flush(GrSurfaceProxy* proxy,
                                SkSurface::BackendSurfaceAccess access,
                                GrFlushFlags flags,
                                int numSemaphores,
                                GrBackendSemaphore backendSemaphores[]);

    SkDEBUGCODE(void validate() const);

    friend class GrContext; // access to: flush & cleanup
    friend class GrContextPriv; // access to: flush
    friend class GrOnFlushResourceProvider; // this is just a shallow wrapper around this class
    friend class GrRecordingContext; // access to: ctor
    friend class SkImage; // for access to: flush

    static const int kNumPixelGeometries = 5; // The different pixel geometries
    static const int kNumDFTOptions = 2;      // DFT or no DFT

    GrRecordingContext* fContext;
    GrPathRendererChain::Options fOptionsForPathRendererChain;
    GrTextContext::Options fOptionsForTextContext;
    // This cache is used by both the vertex and index pools. It reuses memory across multiple
    // flushes.
    sk_sp<GrBufferAllocPool::CpuBufferCache> fCpuBufferCache;

    OpListDAG fDAG;
    GrOpList* fActiveOpList = nullptr;
    // These are the IDs of the opLists currently being flushed (in internalFlush)
    SkSTArray<8, uint32_t, true> fFlushingOpListIDs;
    // These are the new opLists generated by the onFlush CBs
    SkSTArray<8, sk_sp<GrOpList>> fOnFlushCBOpLists;

    std::unique_ptr<GrTextContext> fTextContext;

    std::unique_ptr<GrPathRendererChain> fPathRendererChain;
    sk_sp<GrSoftwarePathRenderer> fSoftwarePathRenderer;

    GrTokenTracker fTokenTracker;
    bool fFlushing;
    bool fReduceOpListSplitting;

    SkTArray<GrOnFlushCallbackObject*> fOnFlushCBObjects;
};

#endif