/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrDrawingManager_DEFINED
#define GrDrawingManager_DEFINED

#include <set>
#include "include/core/SkSurface.h"
#include "include/private/SkTArray.h"
#include "src/gpu/GrBufferAllocPool.h"
#include "src/gpu/GrDeferredUpload.h"
#include "src/gpu/GrPathRenderer.h"
#include "src/gpu/GrPathRendererChain.h"
#include "src/gpu/GrResourceCache.h"
#include "src/gpu/text/GrTextContext.h"

class GrCoverageCountingPathRenderer;
class GrOnFlushCallbackObject;
class GrOpFlushState;
class GrOpsTask;
class GrRecordingContext;
class GrRenderTargetContext;
class GrRenderTargetProxy;
class GrSoftwarePathRenderer;
class GrTextureContext;
class SkDeferredDisplayList;

/**
 * GrDrawingManager owns and maintains the DAG of GrRenderTasks for a recording
 * context. It creates the various task types (ops, texture-resolve, wait,
 * transfer-from, copy), tracks which opsTask is active, and drives execution of
 * the tasks at flush time. It also vends the text context and the
 * path-renderer chain used while recording draws.
 */
class GrDrawingManager {
public:
    ~GrDrawingManager();

    // Releases GPU-backed resources held via the manager's helpers
    // (path-renderer chain, etc.). NOTE(review): the precise set of resources
    // freed is defined in the .cpp — confirm there before relying on it.
    void freeGpuResources();

    // Factories for the two context flavors that record work into this manager.
    std::unique_ptr<GrRenderTargetContext> makeRenderTargetContext(sk_sp<GrSurfaceProxy>,
                                                                   GrColorType,
                                                                   sk_sp<SkColorSpace>,
                                                                   const SkSurfaceProps*,
                                                                   bool managedOpsTask = true);
    std::unique_ptr<GrTextureContext> makeTextureContext(sk_sp<GrSurfaceProxy>,
                                                         GrColorType,
                                                         SkAlphaType,
                                                         sk_sp<SkColorSpace>);

    // A managed opsTask is controlled by the drawing manager (i.e., sorted & flushed with the
    // others). An unmanaged one is created and used by the onFlushCallback.
    sk_sp<GrOpsTask> newOpsTask(sk_sp<GrRenderTargetProxy>, bool managedOpsTask);

    // Create a new, specialized, render task that will regenerate mipmap levels and/or resolve
    // MSAA (depending on ResolveFlags). This method will add the new render task to the list of
    // render tasks and make it depend on the target texture proxy. It is up to the caller to add
    // any dependencies on the new render task.
    GrRenderTask* newTextureResolveRenderTask(
            sk_sp<GrSurfaceProxy>, GrSurfaceProxy::ResolveFlags, const GrCaps&);

    // Create a new render task that will cause the gpu to wait on semaphores before executing any
    // more RenderTasks that target proxy. It is possible for this wait to also block additional
    // work (even to other proxies) that has already been recorded or will be recorded later. The
    // only guarantee is that future work to the passed in proxy will wait on the semaphores to be
    // signaled.
    void newWaitRenderTask(sk_sp<GrSurfaceProxy> proxy, std::unique_ptr<sk_sp<GrSemaphore>[]>,
                           int numSemaphores);

    // Create a new render task which copies the pixels from the srcProxy into the dstBuffer. This
    // is used to support the asynchronous readback API. The srcRect is the region of the srcProxy
    // to be copied. The surfaceColorType says how we should interpret the data when reading back
    // from the source. DstColorType describes how the data should be stored in the dstBuffer.
    // DstOffset is the offset into the dstBuffer where we will start writing data.
    void newTransferFromRenderTask(sk_sp<GrSurfaceProxy> srcProxy, const SkIRect& srcRect,
                                   GrColorType surfaceColorType, GrColorType dstColorType,
                                   sk_sp<GrGpuBuffer> dstBuffer, size_t dstOffset);

    // Creates a new render task which copies a pixel rectangle from srcProxy into dstProxy. The src
    // pixels copied are specified by srcRect. They are copied to a rect of the same size in
    // dstProxy with top left at dstPoint. If the src rect is clipped by the src bounds then pixel
    // values in the dst rect corresponding to the area clipped by the src rect are not overwritten.
    // This method is not guaranteed to succeed depending on the type of surface, formats, etc, and
    // the backend-specific limitations.
    bool newCopyRenderTask(sk_sp<GrSurfaceProxy> srcProxy, const SkIRect& srcRect,
                           sk_sp<GrSurfaceProxy> dstProxy, const SkIPoint& dstPoint);

    // The recording context this manager records into. Back-pointer; set in the ctor.
    GrRecordingContext* getContext() { return fContext; }

    GrTextContext* getTextContext();

    // Selects a path renderer from the chain that can draw the given path args;
    // optionally allows falling back to the software path renderer.
    GrPathRenderer* getPathRenderer(const GrPathRenderer::CanDrawPathArgs& args,
                                    bool allowSW,
                                    GrPathRendererChain::DrawType drawType,
                                    GrPathRenderer::StencilSupport* stencilSupport = nullptr);

    GrPathRenderer* getSoftwarePathRenderer();

    // Returns a direct pointer to the coverage counting path renderer, or null if it is not
    // supported and turned on.
    GrCoverageCountingPathRenderer* getCoverageCountingPathRenderer();

    void flushIfNecessary();

    static bool ProgramUnitTest(GrContext* context, int maxStages, int maxLevels);

    // Flushes work targeting the given proxies. 'access' describes how the
    // backend surfaces will be used afterward; 'info' carries flush/semaphore
    // options (see SkSurface::flush).
    GrSemaphoresSubmitted flushSurfaces(GrSurfaceProxy* proxies[],
                                        int cnt,
                                        SkSurface::BackendSurfaceAccess access,
                                        const GrFlushInfo& info);
    // Convenience overload: flush a single proxy.
    GrSemaphoresSubmitted flushSurface(GrSurfaceProxy* proxy,
                                       SkSurface::BackendSurfaceAccess access,
                                       const GrFlushInfo& info) {
        return this->flushSurfaces(&proxy, 1, access, info);
    }

    void addOnFlushCallbackObject(GrOnFlushCallbackObject*);

#if GR_TEST_UTILS
    void testingOnly_removeOnFlushCallbackObject(GrOnFlushCallbackObject*);
#endif

    // DDL (deferred display list) support: move the recorded tasks into a DDL,
    // or replay a DDL's tasks into this manager retargeted at 'newDest'.
    void moveRenderTasksToDDL(SkDeferredDisplayList* ddl);
    void copyRenderTasksFromDDL(const SkDeferredDisplayList*, GrRenderTargetProxy* newDest);

private:
    // This class encapsulates maintenance and manipulation of the drawing manager's DAG of
    // renderTasks.
    class RenderTaskDAG {
    public:
        RenderTaskDAG(bool sortRenderTasks);
        ~RenderTaskDAG();

        // Currently, when explicitly allocating resources, this call will topologically sort the
        // GrRenderTasks.
        // MDB TODO: remove once incremental GrRenderTask sorting is enabled
        void prepForFlush();

        void closeAll(const GrCaps* caps);

        // A yucky combination of closeAll and reset
        void cleanup(const GrCaps* caps);

        // Appends the unique IDs of all tasks in the DAG to 'idArray'.
        void gatherIDs(SkSTArray<8, uint32_t, true>* idArray) const;

        void reset();

        // These calls forceably remove a GrRenderTask from the DAG. They are problematic bc they
        // just remove the GrRenderTask but don't cleanup any refering pointers (i.e., dependency
        // pointers in the DAG). They work right now bc they are only called at flush time, after
        // the topological sort is complete (so the dangling pointers aren't used).
        void removeRenderTask(int index);
        void removeRenderTasks(int startIndex, int stopIndex);

        bool empty() const { return fRenderTasks.empty(); }
        int numRenderTasks() const { return fRenderTasks.count(); }

        bool isUsed(GrSurfaceProxy*) const;

        GrRenderTask* renderTask(int index) { return fRenderTasks[index].get(); }
        const GrRenderTask* renderTask(int index) const { return fRenderTasks[index].get(); }

        GrRenderTask* back() { return fRenderTasks.back().get(); }
        const GrRenderTask* back() const { return fRenderTasks.back().get(); }

        GrRenderTask* add(sk_sp<GrRenderTask>);
        GrRenderTask* addBeforeLast(sk_sp<GrRenderTask>);
        void add(const SkTArray<sk_sp<GrRenderTask>>&);

        void swap(SkTArray<sk_sp<GrRenderTask>>* renderTasks);

        bool sortingRenderTasks() const { return fSortRenderTasks; }

    private:
        SkTArray<sk_sp<GrRenderTask>> fRenderTasks;
        bool fSortRenderTasks;
    };

    // Private ctor: only GrRecordingContext (a friend, below) creates managers.
    GrDrawingManager(GrRecordingContext*, const GrPathRendererChain::Options&,
                     const GrTextContext::Options&,
                     bool sortRenderTasks,
                     bool reduceOpsTaskSplitting);

    bool wasAbandoned() const;

    void cleanup();

    // Closes the target's dependent render tasks (or, if not in sorting/opsTask-splitting-reduction
    // mode, closes fActiveOpsTask) in preparation for us opening a new opsTask that will write to
    // 'target'.
    void closeRenderTasksForNewRenderTask(GrSurfaceProxy* target);

    // return true if any GrRenderTasks were actually executed; false otherwise
    bool executeRenderTasks(int startIndex, int stopIndex, GrOpFlushState*,
                            int* numRenderTasksExecuted);

    GrSemaphoresSubmitted flush(GrSurfaceProxy* proxies[],
                                int numProxies,
                                SkSurface::BackendSurfaceAccess access,
                                const GrFlushInfo&,
                                const GrPrepareForExternalIORequests&);

    SkDEBUGCODE(void validate() const);

    friend class GrContext; // access to: flush & cleanup
    friend class GrContextPriv; // access to: flush
    friend class GrOnFlushResourceProvider; // this is just a shallow wrapper around this class
    friend class GrRecordingContext; // access to: ctor
    friend class SkImage; // for access to: flush

    static const int kNumPixelGeometries = 5; // The different pixel geometries
    static const int kNumDFTOptions = 2;      // DFT or no DFT

    GrRecordingContext* fContext;
    GrPathRendererChain::Options fOptionsForPathRendererChain;
    GrTextContext::Options fOptionsForTextContext;
    // This cache is used by both the vertex and index pools. It reuses memory across multiple
    // flushes.
    sk_sp<GrBufferAllocPool::CpuBufferCache> fCpuBufferCache;

    RenderTaskDAG fDAG;
    GrOpsTask* fActiveOpsTask = nullptr;
    // These are the IDs of the opsTask currently being flushed (in internalFlush)
    SkSTArray<8, uint32_t, true> fFlushingRenderTaskIDs;
    // These are the new renderTasks generated by the onFlush CBs
    SkSTArray<4, sk_sp<GrRenderTask>> fOnFlushRenderTasks;

    std::unique_ptr<GrTextContext> fTextContext;

    std::unique_ptr<GrPathRendererChain> fPathRendererChain;
    sk_sp<GrSoftwarePathRenderer> fSoftwarePathRenderer;

    GrTokenTracker fTokenTracker;
    bool fFlushing;
    bool fReduceOpsTaskSplitting;

    SkTArray<GrOnFlushCallbackObject*> fOnFlushCBObjects;

    void addDDLTarget(GrSurfaceProxy* proxy) { fDDLTargets.insert(proxy); }
    bool isDDLTarget(GrSurfaceProxy* proxy) { return fDDLTargets.find(proxy) != fDDLTargets.end(); }
    void clearDDLTargets() { fDDLTargets.clear(); }

    // We play a trick with lazy proxies to retarget the base target of a DDL to the SkSurface
    // it is replayed on. Because of this remapping we need to explicitly store the targets of
    // DDL replaying.
    // Note: we do not expect a whole lot of these per flush
    std::set<GrSurfaceProxy*> fDDLTargets;
};

#endif