/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrDrawingManager.h"

#include "GrBackendSemaphore.h"
#include "GrContext.h"
#include "GrContextPriv.h"
#include "GrGpu.h"
#include "GrOnFlushResourceProvider.h"
#include "GrOpList.h"
#include "GrRenderTargetContext.h"
#include "GrPathRenderingRenderTargetContext.h"
#include "GrRenderTargetProxy.h"
#include "GrResourceAllocator.h"
#include "GrResourceProvider.h"
#include "GrSoftwarePathRenderer.h"
#include "GrSurfaceProxyPriv.h"
#include "GrTextureContext.h"
#include "GrTextureOpList.h"
#include "GrTextureProxy.h"
#include "GrTextureProxyPriv.h"
#include "SkSurface_Gpu.h"
#include "SkTTopoSort.h"

#include "GrTracing.h"
#include "text/GrAtlasTextContext.h"
#include "text/GrStencilAndCoverTextContext.h"

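// Closes every opList (calling endFlush on any that are still externally referenced) and then
// drops them, along with the path renderers and onFlush callback objects owned by the manager.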
void GrDrawingManager::cleanup() {
    for (int i = 0; i < fOpLists.count(); ++i) {
        // no opList should receive a new command after this
        fOpLists[i]->makeClosed(*fContext->caps());

        // We shouldn't need to do this, but it turns out some clients still hold onto opLists
        // after a cleanup.
        // MDB TODO: is this still true?
        if (!fOpLists[i]->unique()) {
            // TODO: Eventually this should be guaranteed unique.
            // https://bugs.chromium.org/p/skia/issues/detail?id=7111
            fOpLists[i]->endFlush();
        }
    }

    fOpLists.reset();

    delete fPathRendererChain;
    fPathRendererChain = nullptr;
    SkSafeSetNull(fSoftwarePathRenderer);

    fOnFlushCBObjects.reset();
}

GrDrawingManager::~GrDrawingManager() {
    this->cleanup();
}

void GrDrawingManager::abandon() {
    fAbandoned = true;
    this->cleanup();
}

void GrDrawingManager::freeGpuResources() {
    for (int i = fOnFlushCBObjects.count() - 1; i >= 0; --i) {
        if (!fOnFlushCBObjects[i]->retainOnFreeGpuResources()) {
            // it's safe to just do this because we're iterating in reverse
            fOnFlushCBObjects.removeShuffle(i);
        }
    }

    // a path renderer may be holding onto resources
    delete fPathRendererChain;
    fPathRendererChain = nullptr;
    SkSafeSetNull(fSoftwarePathRenderer);
}

// MDB TODO: make use of the 'proxy' parameter.
GrSemaphoresSubmitted GrDrawingManager::internalFlush(GrSurfaceProxy*,
                                                      GrResourceCache::FlushType type,
                                                      int numSemaphores,
                                                      GrBackendSemaphore backendSemaphores[]) {
    GR_CREATE_TRACE_MARKER_CONTEXT("GrDrawingManager", "internalFlush", fContext);

    if (fFlushing || this->wasAbandoned()) {
        return GrSemaphoresSubmitted::kNo;
    }
    fFlushing = true;

    for (int i = 0; i < fOpLists.count(); ++i) {
        // Usually the GrOpLists are already closed at this point, but sometimes Ganesh needs to
        // flush mid-draw. In that case, the SkGpuDevice's GrOpLists won't be closed but need to
        // be flushed anyway. Closing such GrOpLists here will mean new GrOpLists will be created
        // to replace them if the SkGpuDevice(s) write to them again.
        fOpLists[i]->makeClosed(*fContext->caps());
    }

#ifdef SK_DEBUG
    // This block checks for any unnecessary splits in the opLists. If two sequential opLists
    // share the same backing GrSurfaceProxy it means the opList was artificially split.
    if (fOpLists.count()) {
        GrRenderTargetOpList* prevOpList = fOpLists[0]->asRenderTargetOpList();
        for (int i = 1; i < fOpLists.count(); ++i) {
            GrRenderTargetOpList* curOpList = fOpLists[i]->asRenderTargetOpList();

            if (prevOpList && curOpList) {
                SkASSERT(prevOpList->fTarget.get() != curOpList->fTarget.get());
            }

            prevOpList = curOpList;
        }
    }
#endif

#ifndef SK_DISABLE_RENDER_TARGET_SORTING
    SkDEBUGCODE(bool result =) SkTTopoSort<GrOpList, GrOpList::TopoSortTraits>(&fOpLists);
    SkASSERT(result);
#endif

    GrOpFlushState flushState(fContext->getGpu(),
                              fContext->contextPriv().resourceProvider(),
                              &fTokenTracker);

    GrOnFlushResourceProvider onFlushProvider(this);

    // Prepare any onFlush op lists (e.g. atlases).
    if (!fOnFlushCBObjects.empty()) {
        fFlushingOpListIDs.reset(fOpLists.count());
        for (int i = 0; i < fOpLists.count(); ++i) {
            fFlushingOpListIDs[i] = fOpLists[i]->uniqueID();
        }
        SkSTArray<4, sk_sp<GrRenderTargetContext>> renderTargetContexts;
        for (GrOnFlushCallbackObject* onFlushCBObject : fOnFlushCBObjects) {
            onFlushCBObject->preFlush(&onFlushProvider,
                                      fFlushingOpListIDs.begin(), fFlushingOpListIDs.count(),
                                      &renderTargetContexts);
            for (const sk_sp<GrRenderTargetContext>& rtc : renderTargetContexts) {
                sk_sp<GrRenderTargetOpList> onFlushOpList = sk_ref_sp(rtc->getRTOpList());
                if (!onFlushOpList) {
                    continue;   // Odd - but not a big deal
                }
#ifdef SK_DEBUG
                // OnFlush callbacks are already invoked during flush, and are therefore expected
                // to handle resource allocation & usage on their own. (No deferred or lazy
                // proxies!)
                onFlushOpList->visitProxies_debugOnly([](GrSurfaceProxy* p) {
                    SkASSERT(!p->asTextureProxy() || !p->asTextureProxy()->texPriv().isDeferred());
                    SkASSERT(GrSurfaceProxy::LazyState::kNot == p->lazyInstantiationState());
                });
#endif
                onFlushOpList->makeClosed(*fContext->caps());
                onFlushOpList->prepare(&flushState);
                fOnFlushCBOpLists.push_back(std::move(onFlushOpList));
            }
            renderTargetContexts.reset();
        }
    }

#if 0
    // Enable this to print out verbose GrOp information
    for (int i = 0; i < fOpLists.count(); ++i) {
        SkDEBUGCODE(fOpLists[i]->dump();)
    }
#endif

    int startIndex, stopIndex;
    bool flushed = false;

    {
        GrResourceAllocator alloc(fContext->contextPriv().resourceProvider());
        for (int i = 0; i < fOpLists.count(); ++i) {
            fOpLists[i]->gatherProxyIntervals(&alloc);
            alloc.markEndOfOpList(i);
        }

#ifdef SK_DISABLE_EXPLICIT_GPU_RESOURCE_ALLOCATION
        startIndex = 0;
        stopIndex = fOpLists.count();
#else
        while (alloc.assign(&startIndex, &stopIndex))
#endif
        {
            if (this->executeOpLists(startIndex, stopIndex, &flushState)) {
                flushed = true;
            }
        }
    }

    fOpLists.reset();

    GrSemaphoresSubmitted result = fContext->getGpu()->finishFlush(numSemaphores,
                                                                   backendSemaphores);

    // We always have to notify the cache when it requested a flush so it can reset its state.
    if (flushed || type == GrResourceCache::FlushType::kCacheRequested) {
        fContext->contextPriv().getResourceCache()->notifyFlushOccurred(type);
    }
    for (GrOnFlushCallbackObject* onFlushCBObject : fOnFlushCBObjects) {
        onFlushCBObject->postFlush(fTokenTracker.nextTokenToFlush(), fFlushingOpListIDs.begin(),
                                   fFlushingOpListIDs.count());
    }
    fFlushingOpListIDs.reset();
    fFlushing = false;

    return result;
}

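// Instantiates, prepares and executes the opLists in the range [startIndex, stopIndex).
// Returns true if any opList actually performed work.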
bool GrDrawingManager::executeOpLists(int startIndex, int stopIndex, GrOpFlushState* flushState) {
    SkASSERT(startIndex <= stopIndex && stopIndex <= fOpLists.count());

    bool anyOpListsExecuted = false;

    for (int i = startIndex; i < stopIndex; ++i) {
        if (!fOpLists[i]) {
            continue;
        }

#ifdef SK_DISABLE_EXPLICIT_GPU_RESOURCE_ALLOCATION
        if (!fOpLists[i]->instantiate(fContext->contextPriv().resourceProvider())) {
            SkDebugf("OpList failed to instantiate.\n");
            fOpLists[i] = nullptr;
            continue;
        }
#else
        SkASSERT(fOpLists[i]->isInstantiated());
#endif

        // TODO: handle this instantiation via lazy surface proxies?
        // Instantiate all deferred proxies (being built on worker threads) so we can upload them.
        fOpLists[i]->instantiateDeferredProxies(fContext->contextPriv().resourceProvider());
        fOpLists[i]->prepare(flushState);
    }

    // Upload all data to the GPU
    flushState->preExecuteDraws();

    // Execute the onFlush op lists first, if any.
    for (sk_sp<GrOpList>& onFlushOpList : fOnFlushCBOpLists) {
        if (!onFlushOpList->execute(flushState)) {
            SkDebugf("WARNING: onFlushOpList failed to execute.\n");
        }
        SkASSERT(onFlushOpList->unique());
        onFlushOpList = nullptr;
    }
    fOnFlushCBOpLists.reset();

    // Execute the normal op lists.
    for (int i = startIndex; i < stopIndex; ++i) {
        if (!fOpLists[i]) {
            continue;
        }

        if (fOpLists[i]->execute(flushState)) {
            anyOpListsExecuted = true;
        }
    }

    SkASSERT(!flushState->commandBuffer());
    SkASSERT(fTokenTracker.nextDrawToken() == fTokenTracker.nextTokenToFlush());

    // We reset the flush state before the OpLists so that the last resources to be freed are those
    // that are written to in the OpLists. This helps to make sure the most recently used resources
    // are the last to be purged by the resource cache.
    flushState->reset();

    for (int i = startIndex; i < stopIndex; ++i) {
        if (!fOpLists[i]) {
            continue;
        }
        if (!fOpLists[i]->unique()) {
            // TODO: Eventually this should be guaranteed unique.
            // https://bugs.chromium.org/p/skia/issues/detail?id=7111
            fOpLists[i]->endFlush();
        }
        fOpLists[i] = nullptr;
    }

    return anyOpListsExecuted;
}

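// Flushes any work pending on 'proxy' (or any requested semaphores) and, if the proxy is backed
// by a render target, resolves it so its contents are up to date for external use.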
GrSemaphoresSubmitted GrDrawingManager::prepareSurfaceForExternalIO(
        GrSurfaceProxy* proxy, int numSemaphores, GrBackendSemaphore backendSemaphores[]) {
    if (this->wasAbandoned()) {
        return GrSemaphoresSubmitted::kNo;
    }
    SkASSERT(proxy);

    GrSemaphoresSubmitted result = GrSemaphoresSubmitted::kNo;
    if (proxy->priv().hasPendingIO() || numSemaphores) {
        result = this->flush(proxy, numSemaphores, backendSemaphores);
    }

    if (!proxy->instantiate(fContext->contextPriv().resourceProvider())) {
        return result;
    }

    GrSurface* surface = proxy->priv().peekSurface();

    if (fContext->getGpu() && surface->asRenderTarget()) {
        fContext->getGpu()->resolveRenderTarget(surface->asRenderTarget(), proxy->origin());
    }
    return result;
}

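// Registers an object that is given the chance to add work at the start of every flush
// (via preFlush) and is notified once the flush completes (via postFlush).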
void GrDrawingManager::addOnFlushCallbackObject(GrOnFlushCallbackObject* onFlushCBObject) {
    fOnFlushCBObjects.push_back(onFlushCBObject);
}

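// Creates a new opList targeting 'rtp'. When 'managedOpList' is true the drawing manager keeps a
// ref and flushes the opList itself; otherwise the opList is only returned to the caller.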
sk_sp<GrRenderTargetOpList> GrDrawingManager::newRTOpList(GrRenderTargetProxy* rtp,
                                                          bool managedOpList) {
    SkASSERT(fContext);

    // This is a temporary fix for the partial-MDB world. In that world we're not reordering, so
    // ops that (in the single opList world) would've just glommed onto the end of the single
    // opList but referred to a far earlier RT need to appear in their own opList.
    if (!fOpLists.empty()) {
        fOpLists.back()->makeClosed(*fContext->caps());
    }

    sk_sp<GrRenderTargetOpList> opList(new GrRenderTargetOpList(rtp,
                                                                fContext->getGpu(),
                                                                fContext->getAuditTrail()));
    SkASSERT(rtp->getLastOpList() == opList.get());

    if (managedOpList) {
        fOpLists.push_back() = opList;
    }

    return opList;
}

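// Creates a new opList targeting 'textureProxy' and appends it to the set of opLists the drawing
// manager flushes.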
sk_sp<GrTextureOpList> GrDrawingManager::newTextureOpList(GrTextureProxy* textureProxy) {
    SkASSERT(fContext);

    // This is a temporary fix for the partial-MDB world. In that world we're not reordering, so
    // ops that (in the single opList world) would've just glommed onto the end of the single
    // opList but referred to a far earlier RT need to appear in their own opList.
    if (!fOpLists.empty()) {
        fOpLists.back()->makeClosed(*fContext->caps());
    }

    sk_sp<GrTextureOpList> opList(new GrTextureOpList(fContext->contextPriv().resourceProvider(),
                                                      textureProxy,
                                                      fContext->getAuditTrail()));

    SkASSERT(textureProxy->getLastOpList() == opList.get());

    fOpLists.push_back() = opList;

    return opList;
}

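// Lazily creates the atlas text context on first use.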
GrAtlasTextContext* GrDrawingManager::getAtlasTextContext() {
    if (!fAtlasTextContext) {
        fAtlasTextContext = GrAtlasTextContext::Make(fOptionsForAtlasTextContext);
    }

    return fAtlasTextContext.get();
}

/*
 * This method finds a path renderer that can draw the specified path on the provided target.
 * Due to its expense, the software path renderer has been split out so it can be individually
 * allowed/disallowed via the "allowSW" boolean.
 */
GrPathRenderer* GrDrawingManager::getPathRenderer(const GrPathRenderer::CanDrawPathArgs& args,
                                                  bool allowSW,
                                                  GrPathRendererChain::DrawType drawType,
                                                  GrPathRenderer::StencilSupport* stencilSupport) {

    if (!fPathRendererChain) {
        fPathRendererChain = new GrPathRendererChain(fContext, fOptionsForPathRendererChain);
    }

    GrPathRenderer* pr = fPathRendererChain->getPathRenderer(args, drawType, stencilSupport);
    if (!pr && allowSW) {
        if (!fSoftwarePathRenderer) {
            fSoftwarePathRenderer =
                    new GrSoftwarePathRenderer(fContext->contextPriv().proxyProvider(),
                                               fOptionsForPathRendererChain.fAllowPathMaskCaching);
        }
        if (GrPathRenderer::CanDrawPath::kNo != fSoftwarePathRenderer->canDrawPath(args)) {
            pr = fSoftwarePathRenderer;
        }
    }

    return pr;
}

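// Returns the coverage counting path renderer from the path renderer chain (building the chain
// if necessary); this may be null if that renderer isn't part of the chain.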
GrCoverageCountingPathRenderer* GrDrawingManager::getCoverageCountingPathRenderer() {
    if (!fPathRendererChain) {
        fPathRendererChain = new GrPathRendererChain(fContext, fOptionsForPathRendererChain);
    }
    return fPathRendererChain->getCoverageCountingPathRenderer();
}

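// Wraps 'sProxy' in a render target context. A GrPathRenderingRenderTargetContext is returned
// instead when device-independent fonts are requested and the caps report path rendering support
// with some form of FSAA; null is returned for an abandoned context, a non-render-target proxy,
// or an invalid config/color space pairing.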
sk_sp<GrRenderTargetContext> GrDrawingManager::makeRenderTargetContext(
                                                            sk_sp<GrSurfaceProxy> sProxy,
                                                            sk_sp<SkColorSpace> colorSpace,
                                                            const SkSurfaceProps* surfaceProps,
                                                            bool managedOpList) {
    if (this->wasAbandoned() || !sProxy->asRenderTargetProxy()) {
        return nullptr;
    }

    // SkSurface catches bad color space usage at creation. This check handles anything that slips
    // by, including internal usage. We allow a null color space here, for read/write pixels and
    // other special code paths. If a color space is provided, though, enforce all other rules.
    if (colorSpace && !SkSurface_Gpu::Valid(fContext, sProxy->config(), colorSpace.get())) {
        SkDEBUGFAIL("Invalid config and colorspace combination");
        return nullptr;
    }

    sk_sp<GrRenderTargetProxy> rtp(sk_ref_sp(sProxy->asRenderTargetProxy()));

    bool useDIF = false;
    if (surfaceProps) {
        useDIF = surfaceProps->isUseDeviceIndependentFonts();
    }

    if (useDIF && fContext->caps()->shaderCaps()->pathRenderingSupport() &&
        GrFSAAType::kNone != rtp->fsaaType()) {

        return sk_sp<GrRenderTargetContext>(new GrPathRenderingRenderTargetContext(
                                                    fContext, this, std::move(rtp),
                                                    std::move(colorSpace), surfaceProps,
                                                    fContext->getAuditTrail(), fSingleOwner));
    }

    return sk_sp<GrRenderTargetContext>(new GrRenderTargetContext(fContext, this, std::move(rtp),
                                                                  std::move(colorSpace),
                                                                  surfaceProps,
                                                                  fContext->getAuditTrail(),
                                                                  fSingleOwner, managedOpList));
}

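// Wraps a non-renderable 'sProxy' in a GrTextureContext; returns null for an abandoned context,
// a non-texture proxy, or an invalid config/color space pairing.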
sk_sp<GrTextureContext> GrDrawingManager::makeTextureContext(sk_sp<GrSurfaceProxy> sProxy,
                                                             sk_sp<SkColorSpace> colorSpace) {
    if (this->wasAbandoned() || !sProxy->asTextureProxy()) {
        return nullptr;
    }

    // SkSurface catches bad color space usage at creation. This check handles anything that slips
    // by, including internal usage. We allow a null color space here, for read/write pixels and
    // other special code paths. If a color space is provided, though, enforce all other rules.
    if (colorSpace && !SkSurface_Gpu::Valid(fContext, sProxy->config(), colorSpace.get())) {
        SkDEBUGFAIL("Invalid config and colorspace combination");
        return nullptr;
    }

    // GrTextureRenderTargets should always be using GrRenderTargetContext
    SkASSERT(!sProxy->asRenderTargetProxy());

    sk_sp<GrTextureProxy> textureProxy(sk_ref_sp(sProxy->asTextureProxy()));

    return sk_sp<GrTextureContext>(new GrTextureContext(fContext, this, std::move(textureProxy),
                                                        std::move(colorSpace),
                                                        fContext->getAuditTrail(),
                                                        fSingleOwner));
}