/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrDrawingManager_DEFINED
#define GrDrawingManager_DEFINED

#include "include/core/SkSurface.h"
#include "include/private/SkTArray.h"
#include "include/private/SkTHash.h"
#include "src/gpu/GrBufferAllocPool.h"
#include "src/gpu/GrDeferredUpload.h"
#include "src/gpu/GrHashMapWithCache.h"
#include "src/gpu/GrPathRenderer.h"
#include "src/gpu/GrPathRendererChain.h"
#include "src/gpu/GrResourceCache.h"
#include "src/gpu/GrSurfaceProxy.h"

// Enabling this will print out which path renderers are being chosen
#define GR_PATH_RENDERER_SPEW 0

class GrCoverageCountingPathRenderer;
class GrGpuBuffer;
class GrOnFlushCallbackObject;
class GrOpFlushState;
class GrOpsTask;
class GrRecordingContext;
class GrRenderTargetContext;
class GrRenderTargetProxy;
class GrRenderTask;
class GrSemaphore;
class GrSoftwarePathRenderer;
class GrSurfaceContext;
class GrSurfaceProxyView;
class GrTextureResolveRenderTask;
class SkDeferredDisplayList;

class GrDrawingManager {
public:
    ~GrDrawingManager();

    void freeGpuResources();

    // A managed opsTask is controlled by the drawing manager (i.e., sorted & flushed with the
    // others). An unmanaged one is created and used by the onFlushCallback.
    sk_sp<GrOpsTask> newOpsTask(GrSurfaceProxyView, bool managedOpsTask);
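    //
    // A minimal usage sketch (illustrative only; 'drawingManager' and 'targetView' are assumed
    // names, with 'targetView' wrapping the proxy being rendered to):
    //
    //     sk_sp<GrOpsTask> opsTask =
    //             drawingManager->newOpsTask(std::move(targetView), /*managedOpsTask=*/true);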

    // Create a render task that can resolve MSAA and/or regenerate mipmap levels on proxies.
    // This method will only add the new render task to the list. It is up to the caller to call
    // addProxy() on the returned object.
    GrTextureResolveRenderTask* newTextureResolveRenderTask(const GrCaps&);
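    //
    // A hypothetical usage sketch ('caps' and the proxy to resolve are assumed to be in scope;
    // the exact addProxy() signature lives on GrTextureResolveRenderTask, not in this header):
    //
    //     GrTextureResolveRenderTask* resolveTask =
    //             drawingManager->newTextureResolveRenderTask(caps);
    //     resolveTask->addProxy(/* proxy, resolve flags, caps ... */);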

    // Creates a new render task that will cause the GPU to wait on semaphores before executing
    // any more RenderTasks that target the proxy. It is possible for this wait to also block
    // additional work (even to other proxies) that has already been recorded or will be recorded
    // later. The only guarantee is that future work targeting the passed-in proxy will wait on
    // the semaphores being signaled.
    void newWaitRenderTask(sk_sp<GrSurfaceProxy> proxy,
                           std::unique_ptr<std::unique_ptr<GrSemaphore>[]>,
                           int numSemaphores);
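    //
    // A minimal sketch of building the semaphore array this call expects ('drawingManager',
    // 'proxy', and the semaphores themselves are assumed to exist already):
    //
    //     std::unique_ptr<std::unique_ptr<GrSemaphore>[]> semaphores(
    //             new std::unique_ptr<GrSemaphore>[2]);
    //     semaphores[0] = /* wait semaphore #1 */;
    //     semaphores[1] = /* wait semaphore #2 */;
    //     drawingManager->newWaitRenderTask(std::move(proxy), std::move(semaphores),
    //                                       /*numSemaphores=*/2);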

    // Creates a new render task which copies the pixels from the srcProxy into the dstBuffer.
    // This is used to support the asynchronous readback API. The srcRect is the region of the
    // srcProxy to be copied. The surfaceColorType says how we should interpret the data when
    // reading back from the source. The dstColorType describes how the data should be stored in
    // the dstBuffer. The dstOffset is the offset into the dstBuffer where we will start writing
    // data.
    void newTransferFromRenderTask(sk_sp<GrSurfaceProxy> srcProxy, const SkIRect& srcRect,
                                   GrColorType surfaceColorType, GrColorType dstColorType,
                                   sk_sp<GrGpuBuffer> dstBuffer, size_t dstOffset);
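    //
    // A hypothetical async-readback sketch ('srcProxy' and a sufficiently large 'dstBuffer'
    // are assumed; the color types are illustrative):
    //
    //     SkIRect readRect = SkIRect::MakeWH(64, 64);
    //     drawingManager->newTransferFromRenderTask(srcProxy, readRect,
    //                                               GrColorType::kRGBA_8888,
    //                                               GrColorType::kRGBA_8888,
    //                                               dstBuffer, /*dstOffset=*/0);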

    // Creates a new render task which copies a pixel rectangle from srcView into dstView. The
    // src pixels copied are specified by srcRect. They are copied to a rect of the same size in
    // dstView with its top-left corner at dstPoint. If srcRect is clipped by the src bounds,
    // the pixels in the dst rect corresponding to the clipped-away area are left unmodified.
    // This method may fail, depending on the surface types, formats, and backend-specific
    // limitations.
    bool newCopyRenderTask(GrSurfaceProxyView srcView, const SkIRect& srcRect,
                           GrSurfaceProxyView dstView, const SkIPoint& dstPoint);
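    //
    // A minimal sketch, assuming 'srcView' and 'dstView' are valid views; since the copy can
    // fail for backend-specific reasons, the return value should be checked:
    //
    //     if (!drawingManager->newCopyRenderTask(std::move(srcView), SkIRect::MakeWH(32, 32),
    //                                            std::move(dstView), SkIPoint::Make(0, 0))) {
    //         // fall back to a draw-based copy
    //     }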

    GrRecordingContext* getContext() { return fContext; }

    GrPathRenderer* getPathRenderer(const GrPathRenderer::CanDrawPathArgs& args,
                                    bool allowSW,
                                    GrPathRendererChain::DrawType drawType,
                                    GrPathRenderer::StencilSupport* stencilSupport = nullptr);
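    //
    // A hypothetical lookup sketch ('args' is assumed to be a populated CanDrawPathArgs; the
    // draw type is illustrative):
    //
    //     GrPathRenderer* pr = drawingManager->getPathRenderer(
    //             args, /*allowSW=*/true, GrPathRendererChain::DrawType::kColor);
    //     if (!pr) {
    //         // no renderer (not even the software fallback) can handle this path
    //     }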

    GrPathRenderer* getSoftwarePathRenderer();

    // Returns a direct pointer to the coverage counting path renderer, or null if it is not
    // supported or not enabled.
    GrCoverageCountingPathRenderer* getCoverageCountingPathRenderer();

    void flushIfNecessary();

    static bool ProgramUnitTest(GrDirectContext*, int maxStages, int maxLevels);

    GrSemaphoresSubmitted flushSurfaces(GrSurfaceProxy* proxies[],
                                        int cnt,
                                        SkSurface::BackendSurfaceAccess access,
                                        const GrFlushInfo& info,
                                        const GrBackendSurfaceMutableState* newState);
    GrSemaphoresSubmitted flushSurface(GrSurfaceProxy* proxy,
                                       SkSurface::BackendSurfaceAccess access,
                                       const GrFlushInfo& info,
                                       const GrBackendSurfaceMutableState* newState) {
        return this->flushSurfaces(&proxy, 1, access, info, newState);
    }
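    //
    // A minimal flush sketch (assumed names; requests no backend access transition and no
    // semaphores via a default GrFlushInfo):
    //
    //     GrFlushInfo info;
    //     GrSemaphoresSubmitted submitted = drawingManager->flushSurface(
    //             proxy, SkSurface::BackendSurfaceAccess::kNoAccess, info, /*newState=*/nullptr);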

    void addOnFlushCallbackObject(GrOnFlushCallbackObject*);

#if GR_TEST_UTILS
    void testingOnly_removeOnFlushCallbackObject(GrOnFlushCallbackObject*);
#endif

    GrRenderTask* getLastRenderTask(const GrSurfaceProxy*) const;
    GrOpsTask* getLastOpsTask(const GrSurfaceProxy*) const;
    void setLastRenderTask(const GrSurfaceProxy*, GrRenderTask*);

    void moveRenderTasksToDDL(SkDeferredDisplayList* ddl);
    void copyRenderTasksFromDDL(sk_sp<const SkDeferredDisplayList>, GrRenderTargetProxy* newDest);
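    //
    // A hypothetical record/replay sketch: the recording manager moves its accumulated tasks
    // into the DDL, and a replaying manager later copies them back, retargeted at the
    // destination surface's proxy ('ddl' and the two managers are assumed names):
    //
    //     recordingManager->moveRenderTasksToDDL(ddl.get());
    //     ...
    //     replayingManager->copyRenderTasksFromDDL(ddl, destRenderTargetProxy);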

private:
    // This class encapsulates maintenance and manipulation of the drawing manager's DAG of
    // renderTasks.
    class RenderTaskDAG {
    public:
        RenderTaskDAG(bool sortRenderTasks);
        ~RenderTaskDAG();

        // Currently, when explicitly allocating resources, this call will topologically sort the
        // GrRenderTasks.
        // MDB TODO: remove once incremental GrRenderTask sorting is enabled
        void prepForFlush();

        void closeAll(const GrCaps* caps);

        void gatherIDs(SkSTArray<8, uint32_t, true>* idArray) const;

        void reset();

        // This call forcibly removes GrRenderTasks from the DAG. It is problematic because it
        // just removes the GrRenderTasks without cleaning up any referring pointers (i.e.,
        // dependency pointers in the DAG). It works right now because it is only called after
        // the topological sort is complete (so the dangling pointers aren't used).
        void rawRemoveRenderTasks(int startIndex, int stopIndex);

        bool empty() const { return fRenderTasks.empty(); }
        int numRenderTasks() const { return fRenderTasks.count(); }

        bool isUsed(GrSurfaceProxy*) const;

        GrRenderTask* renderTask(int index) { return fRenderTasks[index].get(); }
        const GrRenderTask* renderTask(int index) const { return fRenderTasks[index].get(); }

        GrRenderTask* back() { return fRenderTasks.back().get(); }
        const GrRenderTask* back() const { return fRenderTasks.back().get(); }

        GrRenderTask* add(sk_sp<GrRenderTask>);
        GrRenderTask* addBeforeLast(sk_sp<GrRenderTask>);
        void add(const SkTArray<sk_sp<GrRenderTask>>&);

        void swap(SkTArray<sk_sp<GrRenderTask>>* renderTasks);

        bool sortingRenderTasks() const { return fSortRenderTasks; }

    private:
        SkTArray<sk_sp<GrRenderTask>> fRenderTasks;
        bool fSortRenderTasks;
    };

    GrDrawingManager(GrRecordingContext*,
                     const GrPathRendererChain::Options&,
                     bool sortRenderTasks,
                     bool reduceOpsTaskSplitting);

    bool wasAbandoned() const;

    // Closes the target's dependent render tasks (or, if not in sorting/opsTask-splitting-
    // reduction mode, closes fActiveOpsTask) in preparation for opening a new opsTask that will
    // write to 'target'.
    void closeRenderTasksForNewRenderTask(GrSurfaceProxy* target);

    // Returns true if any GrRenderTasks were actually executed; false otherwise.
    bool executeRenderTasks(int startIndex, int stopIndex, GrOpFlushState*,
                            int* numRenderTasksExecuted);

    void removeRenderTasks(int startIndex, int stopIndex);

    bool flush(GrSurfaceProxy* proxies[],
               int numProxies,
               SkSurface::BackendSurfaceAccess access,
               const GrFlushInfo&,
               const GrBackendSurfaceMutableState* newState);

    bool submitToGpu(bool syncToCpu);

    SkDEBUGCODE(void validate() const);

    friend class GrContext;                 // access to: flush & cleanup
    friend class GrContextPriv;             // access to: flush
    friend class GrOnFlushResourceProvider; // this is just a shallow wrapper around this class
    friend class GrRecordingContext;        // access to: ctor
    friend class SkImage;                   // for access to: flush

    static const int kNumPixelGeometries = 5; // The different pixel geometries
    static const int kNumDFTOptions = 2;      // DFT or no DFT

    GrRecordingContext* fContext;
    GrPathRendererChain::Options fOptionsForPathRendererChain;

    // This cache is used by both the vertex and index pools. It reuses memory across multiple
    // flushes.
    sk_sp<GrBufferAllocPool::CpuBufferCache> fCpuBufferCache;

    RenderTaskDAG fDAG;
    GrOpsTask* fActiveOpsTask = nullptr;
    // These are the IDs of the opsTasks currently being flushed (in internalFlush).
    SkSTArray<8, uint32_t, true> fFlushingRenderTaskIDs;
    // These are the new renderTasks generated by the onFlush CBs.
    SkSTArray<4, sk_sp<GrRenderTask>> fOnFlushRenderTasks;
robertphillips | 77a2e52 | 2015-10-17 07:43:27 -0700 | [diff] [blame] | 224 | |
Ben Wagner | 9ec70c6 | 2018-07-12 13:30:47 -0400 | [diff] [blame] | 225 | std::unique_ptr<GrPathRendererChain> fPathRendererChain; |
| 226 | sk_sp<GrSoftwarePathRenderer> fSoftwarePathRenderer; |
brianosman | 86e7626 | 2016-08-11 12:17:31 -0700 | [diff] [blame] | 227 | |
Robert Phillips | 40a29d7 | 2018-01-18 12:59:22 -0500 | [diff] [blame] | 228 | GrTokenTracker fTokenTracker; |
brianosman | 86e7626 | 2016-08-11 12:17:31 -0700 | [diff] [blame] | 229 | bool fFlushing; |
Greg Daniel | f41b2bd | 2019-08-22 16:19:24 -0400 | [diff] [blame] | 230 | bool fReduceOpsTaskSplitting; |
bsalomon | b77a907 | 2016-09-07 10:02:04 -0700 | [diff] [blame] | 231 | |
Chris Dalton | fe199b7 | 2017-05-05 11:26:15 -0400 | [diff] [blame] | 232 | SkTArray<GrOnFlushCallbackObject*> fOnFlushCBObjects; |
Robert Phillips | 15c9142 | 2019-05-07 16:54:48 -0400 | [diff] [blame] | 233 | |
    void addDDLTarget(GrSurfaceProxy* newTarget, GrRenderTargetProxy* ddlTarget) {
        fDDLTargets.set(newTarget->uniqueID().asUInt(), ddlTarget);
    }
    bool isDDLTarget(GrSurfaceProxy* newTarget) {
        return SkToBool(fDDLTargets.find(newTarget->uniqueID().asUInt()));
    }
    GrRenderTargetProxy* getDDLTarget(GrSurfaceProxy* newTarget) {
        auto entry = fDDLTargets.find(newTarget->uniqueID().asUInt());
        return entry ? *entry : nullptr;
    }
    void clearDDLTargets() { fDDLTargets.reset(); }
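    //
    // A sketch of how these helpers fit together during DDL replay (illustrative names only):
    //
    //     this->addDDLTarget(newTarget, ddlLazyProxy);   // recorded at replay time
    //     SkASSERT(this->isDDLTarget(newTarget));
    //     GrRenderTargetProxy* lazyProxy = this->getDDLTarget(newTarget);
    //     ...
    //     this->clearDDLTargets();                       // once the flush completes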

    // We play a trick with lazy proxies to retarget the base target of a DDL to the SkSurface
    // it is replayed on. 'fDDLTargets' stores this mapping from SkSurface unique proxy ID
    // to the DDL's lazy proxy.
    // Note: we do not expect a whole lot of these per flush.
    SkTHashMap<uint32_t, GrRenderTargetProxy*> fDDLTargets;

    struct SurfaceIDKeyTraits {
        static uint32_t GetInvalidKey() {
            return GrSurfaceProxy::UniqueID::InvalidID().asUInt();
        }
    };

    GrHashMapWithCache<uint32_t, GrRenderTask*, SurfaceIDKeyTraits, GrCheapHash> fLastRenderTasks;
};

#endif