blob: b91dd49e7dacdb63341a20bc0cb3e694b72d6a25 [file] [log] [blame]
robertphillips77a2e522015-10-17 07:43:27 -07001/*
2 * Copyright 2015 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8#ifndef GrDrawingManager_DEFINED
9#define GrDrawingManager_DEFINED
10
Mike Kleinc0bd9f92019-04-23 12:05:21 -050011#include "include/core/SkSurface.h"
12#include "include/private/SkTArray.h"
Adlai Hollerd71b7b02020-06-08 15:55:00 -040013#include "include/private/SkTHash.h"
Adlai Hollerc2bfcff2020-11-06 15:39:36 -050014#include "src/core/SkSpan.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050015#include "src/gpu/GrBufferAllocPool.h"
16#include "src/gpu/GrDeferredUpload.h"
Robert Phillipsc7ed7e62020-06-29 20:04:57 +000017#include "src/gpu/GrHashMapWithCache.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/gpu/GrPathRenderer.h"
19#include "src/gpu/GrPathRendererChain.h"
20#include "src/gpu/GrResourceCache.h"
Herb Derby082232b2020-06-10 15:08:18 -040021#include "src/gpu/GrSurfaceProxy.h"
Robert Phillipsf2361d22016-10-25 14:20:06 -040022
Robert Phillipsd81379d2020-04-21 10:39:02 -040023// Enabling this will print out which path renderers are being chosen
24#define GR_PATH_RENDERER_SPEW 0
25
Chris Daltonfddb6c02017-11-04 15:22:22 -060026class GrCoverageCountingPathRenderer;
Herb Derby082232b2020-06-10 15:08:18 -040027class GrGpuBuffer;
Robert Phillipsfbcef6e2017-06-15 12:07:18 -040028class GrOnFlushCallbackObject;
Herb Derbydc214c22018-11-08 13:31:39 -050029class GrOpFlushState;
Greg Danielf41b2bd2019-08-22 16:19:24 -040030class GrOpsTask;
Robert Phillips69893702019-02-22 11:16:30 -050031class GrRecordingContext;
Brian Salomoneebe7352020-12-09 16:37:04 -050032class GrSurfaceDrawContext;
Robert Phillipsc7635fa2016-10-28 13:25:24 -040033class GrRenderTargetProxy;
Herb Derby082232b2020-06-10 15:08:18 -040034class GrRenderTask;
35class GrSemaphore;
robertphillips68737822015-10-29 12:12:21 -070036class GrSoftwarePathRenderer;
Greg Daniel46e366a2019-12-16 14:38:36 -050037class GrSurfaceContext;
Greg Daniel16f5c652019-10-29 11:26:01 -040038class GrSurfaceProxyView;
Chris Daltone2a903e2019-09-18 13:41:50 -060039class GrTextureResolveRenderTask;
Brian Salomon653f42f2018-07-10 10:07:31 -040040class SkDeferredDisplayList;
robertphillips77a2e522015-10-17 07:43:27 -070041
// The GrDrawingManager owns the recording context's list of GrRenderTasks (the task DAG),
// hands out new tasks (ops tasks, copy/transfer/wait/resolve tasks), coordinates flushing
// them to the GPU, and owns the path-renderer chain used to select a path rendering
// implementation. It is created by, and lives on, a GrRecordingContext (see the private
// ctor and the GrRecordingContext friend declaration below).
class GrDrawingManager {
public:
    ~GrDrawingManager();

    // Drops GPU-backed resources held by this manager (e.g. the path renderer chain and
    // the CPU buffer cache — see fPathRendererChain/fCpuBufferCache below).
    void freeGpuResources();

    // OpsTasks created at flush time are stored and handled differently from the others.
    sk_sp<GrOpsTask> newOpsTask(GrSurfaceProxyView, bool flushTimeOpsTask);

    // Create a render task that can resolve MSAA and/or regenerate mipmap levels on proxies. This
    // method will only add the new render task to the list. It is up to the caller to call
    // addProxy() on the returned object.
    GrTextureResolveRenderTask* newTextureResolveRenderTask(const GrCaps&);

    // Create a new render task that will cause the gpu to wait on semaphores before executing any
    // more RenderTasks that target proxy. It is possible for this wait to also block additional
    // work (even to other proxies) that has already been recorded or will be recorded later. The
    // only guarantee is that future work to the passed in proxy will wait on the semaphores to be
    // signaled.
    void newWaitRenderTask(sk_sp<GrSurfaceProxy> proxy,
                           std::unique_ptr<std::unique_ptr<GrSemaphore>[]>,
                           int numSemaphores);

    // Create a new render task which copies the pixels from the srcProxy into the dstBuffer. This
    // is used to support the asynchronous readback API. The srcRect is the region of the srcProxy
    // to be copied. The surfaceColorType says how we should interpret the data when reading back
    // from the source. DstColorType describes how the data should be stored in the dstBuffer.
    // DstOffset is the offset into the dstBuffer where we will start writing data.
    void newTransferFromRenderTask(sk_sp<GrSurfaceProxy> srcProxy, const SkIRect& srcRect,
                                   GrColorType surfaceColorType, GrColorType dstColorType,
                                   sk_sp<GrGpuBuffer> dstBuffer, size_t dstOffset);

    // Creates a new render task which copies a pixel rectangle from src into dst. The src
    // pixels copied are specified by srcRect. They are copied to a rect of the same size in
    // dst with top left at dstPoint. If the src rect is clipped by the src bounds then pixel
    // values in the dst rect corresponding to the area clipped by the src rect are not overwritten.
    // This method is not guaranteed to succeed depending on the type of surface, formats, etc, and
    // the backend-specific limitations. Returns false if the task could not be created.
    bool newCopyRenderTask(sk_sp<GrSurfaceProxy> src,
                           SkIRect srcRect,
                           sk_sp<GrSurfaceProxy> dst,
                           SkIPoint dstPoint,
                           GrSurfaceOrigin);

    // Adds a task that writes the data from the passed GrMipLevels to dst. The lifetime of the
    // pixel data in the levels should be tied to the passed SkData. srcColorType is the color
    // type of the GrMipLevels. dstColorType is the color type being used with dst and must
    // be compatible with dst's format according to GrCaps::areColorTypeAndFormatCompatible().
    bool newWritePixelsTask(sk_sp<GrSurfaceProxy> dst,
                            SkIRect rect,
                            GrColorType srcColorType,
                            GrColorType dstColorType,
                            const GrMipLevel[],
                            int levelCount,
                            sk_sp<SkData> storage);

    // The recording context that owns this drawing manager (non-owning back-pointer).
    GrRecordingContext* getContext() { return fContext; }

    // Selects a path renderer from the chain for the given draw. If allowSW is true the
    // software path renderer may be returned as a fallback. stencilSupport, when non-null,
    // receives the chosen renderer's stencil capabilities.
    GrPathRenderer* getPathRenderer(const GrPathRenderer::CanDrawPathArgs& args,
                                    bool allowSW,
                                    GrPathRendererChain::DrawType drawType,
                                    GrPathRenderer::StencilSupport* stencilSupport = nullptr);

    // Returns the CPU-rasterization fallback path renderer directly.
    GrPathRenderer* getSoftwarePathRenderer();

    // Returns a direct pointer to the coverage counting path renderer, or null if it is not
    // supported and turned on.
    GrCoverageCountingPathRenderer* getCoverageCountingPathRenderer();

    // Returns a direct pointer to the tessellation path renderer, or null if it is not supported
    // and turned on.
    GrPathRenderer* getTessellationPathRenderer();

    // NOTE(review): presumably flushes when some internal threshold is exceeded — the
    // condition is defined in the implementation file; confirm there.
    void flushIfNecessary();

    static bool ProgramUnitTest(GrDirectContext*, int maxStages, int maxLevels);

    // Flushes work targeting the given proxies. newState, if non-null, describes a mutable
    // state the backend surface should be left in after the flush.
    GrSemaphoresSubmitted flushSurfaces(SkSpan<GrSurfaceProxy*>,
                                        SkSurface::BackendSurfaceAccess,
                                        const GrFlushInfo&,
                                        const GrBackendSurfaceMutableState* newState);

    // Registers an object to be notified around flush time (see GrOnFlushCallbackObject).
    void addOnFlushCallbackObject(GrOnFlushCallbackObject*);

#if GR_TEST_UTILS
    void testingOnly_removeOnFlushCallbackObject(GrOnFlushCallbackObject*);
    GrPathRendererChain::Options testingOnly_getOptionsForPathRendererChain() {
        return fOptionsForPathRendererChain;
    }
#endif

    // Accessors for the proxy -> last-render-task bookkeeping (fLastRenderTasks below),
    // which tracks the most recent task that targeted each surface proxy.
    GrRenderTask* getLastRenderTask(const GrSurfaceProxy*) const;
    GrOpsTask* getLastOpsTask(const GrSurfaceProxy*) const;
    void setLastRenderTask(const GrSurfaceProxy*, GrRenderTask*);

    // Deferred display list (DDL) support: move the recorded tasks into a DDL, and later
    // replay a DDL as a task drawing into newDest at the given offset.
    void moveRenderTasksToDDL(SkDeferredDisplayList* ddl);
    void createDDLTask(sk_sp<const SkDeferredDisplayList>,
                       sk_sp<GrRenderTargetProxy> newDest,
                       SkIPoint offset);

private:
    // Only GrRecordingContext constructs a drawing manager (see friend declarations).
    GrDrawingManager(GrRecordingContext*,
                     const GrPathRendererChain::Options&,
                     bool reduceOpsTaskSplitting);

    bool wasAbandoned() const;

    void closeActiveOpsTask();

    // return true if any GrRenderTasks were actually executed; false otherwise
    bool executeRenderTasks(int startIndex, int stopIndex, GrOpFlushState*,
                            int* numRenderTasksExecuted);

    // Removes tasks in [startIndex, stopIndex) from the DAG.
    void removeRenderTasks(int startIndex, int stopIndex);

    // DAG maintenance: topological sort and (when fReduceOpsTaskSplitting is set —
    // TODO confirm in the .cpp) reordering to reduce render-pass breaks.
    void sortTasks();
    void reorderTasks();

    void closeAllTasks();

    // Take ownership of a task, placing it at the end (or just before the last task) of fDAG.
    GrRenderTask* appendTask(sk_sp<GrRenderTask>);
    GrRenderTask* insertTaskBeforeLast(sk_sp<GrRenderTask>);

    // Executes the DAG for the given proxies; returns whether anything was flushed.
    bool flush(SkSpan<GrSurfaceProxy*> proxies,
               SkSurface::BackendSurfaceAccess access,
               const GrFlushInfo&,
               const GrBackendSurfaceMutableState* newState);

    // Submits recorded GPU work; syncToCpu forces a wait for completion.
    bool submitToGpu(bool syncToCpu);

    // Debug-only consistency check of the manager's internal state.
    SkDEBUGCODE(void validate() const);

    friend class GrDirectContext; // access to: flush & cleanup
    friend class GrDirectContextPriv; // access to: flush
    friend class GrOnFlushResourceProvider; // this is just a shallow wrapper around this class
    friend class GrRecordingContext;  // access to: ctor
    friend class SkImage; // for access to: flush

    static const int kNumPixelGeometries = 5; // The different pixel geometries
    static const int kNumDFTOptions = 2;      // DFT or no DFT

    GrRecordingContext* fContext;
    GrPathRendererChain::Options fOptionsForPathRendererChain;

    // This cache is used by both the vertex and index pools. It reuses memory across multiple
    // flushes.
    sk_sp<GrBufferAllocPool::CpuBufferCache> fCpuBufferCache;

    // The ordered list of recorded render tasks (the task DAG).
    SkTArray<sk_sp<GrRenderTask>> fDAG;
    // The ops task currently open for recording, if any.
    GrOpsTask* fActiveOpsTask = nullptr;
    // These are the IDs of the opsTask currently being flushed (in internalFlush). They are
    // only stored here to prevent memory thrashing.
    SkSTArray<8, uint32_t, true> fFlushingRenderTaskIDs;
    // These are the new renderTasks generated by the onFlush CBs
    SkSTArray<4, sk_sp<GrRenderTask>> fOnFlushRenderTasks;

    std::unique_ptr<GrPathRendererChain> fPathRendererChain;
    sk_sp<GrSoftwarePathRenderer> fSoftwarePathRenderer;

    GrTokenTracker fTokenTracker;
    bool fFlushing;                          // re-entrancy guard around flush
    const bool fReduceOpsTaskSplitting;      // fixed at construction (see ctor)

    // Objects to be notified around flush; registered via addOnFlushCallbackObject().
    SkTArray<GrOnFlushCallbackObject*> fOnFlushCBObjects;

    // Key traits for fLastRenderTasks: surface proxies are keyed by their unique ID,
    // with the proxy InvalidID as the hash map's sentinel key.
    struct SurfaceIDKeyTraits {
        static uint32_t GetInvalidKey() {
            return GrSurfaceProxy::UniqueID::InvalidID().asUInt();
        }
    };

    // Maps a surface proxy's unique ID to the last render task that targeted it.
    GrHashMapWithCache<uint32_t, GrRenderTask*, SurfaceIDKeyTraits, GrCheapHash> fLastRenderTasks;
};
215
216#endif