/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
Greg Daniel54bfb182018-11-20 17:12:36 -05008
Robert Phillipsb7bfbc22020-07-01 12:55:01 -04009#include "include/gpu/GrDirectContext.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050010
Adlai Holler3acc69a2020-10-13 08:20:51 -040011#include "include/core/SkTraceMemoryDump.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050012#include "include/gpu/GrContextThreadSafeProxy.h"
Brian Salomon71283232021-04-08 12:45:58 -040013#include "src/core/SkAutoMalloc.h"
Adlai Holler9555f292020-10-09 09:41:14 -040014#include "src/core/SkTaskGroup.h"
Robert Phillips06273bc2021-08-11 15:43:50 -040015#include "src/core/SkTraceEvent.h"
Brian Salomon71283232021-04-08 12:45:58 -040016#include "src/gpu/GrBackendUtils.h"
Adlai Holler9555f292020-10-09 09:41:14 -040017#include "src/gpu/GrClientMappedBufferManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/gpu/GrContextThreadSafeProxyPriv.h"
Adlai Hollera0693042020-10-14 11:23:11 -040019#include "src/gpu/GrDirectContextPriv.h"
Adlai Holler4aa4c602020-10-12 13:58:52 -040020#include "src/gpu/GrDrawingManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050021#include "src/gpu/GrGpu.h"
Adlai Holler9555f292020-10-09 09:41:14 -040022#include "src/gpu/GrResourceProvider.h"
23#include "src/gpu/GrShaderUtils.h"
Robert Phillips53eaa642021-08-10 13:49:51 -040024#include "src/gpu/SurfaceContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050025#include "src/gpu/effects/GrSkSLFP.h"
26#include "src/gpu/gl/GrGLGpu.h"
27#include "src/gpu/mock/GrMockGpu.h"
Robert Phillipse19babf2020-04-06 13:57:30 -040028#include "src/gpu/text/GrAtlasManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050029#include "src/gpu/text/GrStrikeCache.h"
Brian Salomonea1d39b2021-04-01 17:06:52 -040030#include "src/image/SkImage_GpuBase.h"
#if SK_GPU_V1
#include "src/gpu/ops/SmallPathAtlasMgr.h"
#else
// A vestigial definition for v2 that will never be instantiated
// (it exists only so that v2 builds compile; every member asserts if reached).
namespace skgpu::v1 {
class SmallPathAtlasMgr {
public:
    SmallPathAtlasMgr() { SkASSERT(0); }
    void reset() { SkASSERT(0); }
};
}
#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -050043#ifdef SK_METAL
Jim Van Verth351c9b52020-11-12 15:21:11 -050044#include "include/gpu/mtl/GrMtlBackendContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050045#include "src/gpu/mtl/GrMtlTrampoline.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050046#endif
47#ifdef SK_VULKAN
Mike Kleinc0bd9f92019-04-23 12:05:21 -050048#include "src/gpu/vk/GrVkGpu.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050049#endif
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -050050#ifdef SK_DIRECT3D
51#include "src/gpu/d3d/GrD3DGpu.h"
52#endif
Stephen White985741a2019-07-18 11:43:45 -040053#ifdef SK_DAWN
Mike Klein52337de2019-07-25 09:00:52 -050054#include "src/gpu/dawn/GrDawnGpu.h"
Stephen White985741a2019-07-18 11:43:45 -040055#endif
Adlai Holler6d0745b2020-10-13 13:29:00 -040056#include <memory>
Robert Phillipsa3457b82018-03-08 11:30:12 -050057
Brian Salomon24069eb2020-06-24 10:19:52 -040058#if GR_TEST_UTILS
59# include "include/utils/SkRandom.h"
60# if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
61# include <sanitizer/lsan_interface.h>
62# endif
63#endif
64
Adlai Holler9555f292020-10-09 09:41:14 -040065#define ASSERT_SINGLE_OWNER GR_ASSERT_SINGLE_OWNER(this->singleOwner())
66
Robert Phillipse7a959d2021-03-11 14:44:42 -050067GrDirectContext::DirectContextID GrDirectContext::DirectContextID::Next() {
Robert Phillipsedff4672021-03-11 09:16:25 -050068 static std::atomic<uint32_t> nextID{1};
69 uint32_t id;
70 do {
71 id = nextID.fetch_add(1, std::memory_order_relaxed);
72 } while (id == SK_InvalidUniqueID);
73 return DirectContextID(id);
74}
75
// Constructs a direct context for the given backend. The thread-safe proxy is
// created immediately; the heavyweight members are set up later in init().
GrDirectContext::GrDirectContext(GrBackendApi backend, const GrContextOptions& options)
        : INHERITED(GrContextThreadSafeProxyPriv::Make(backend, options), false)
        , fDirectContextID(DirectContextID::Next()) {
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050080
// Tears the context down in a strict order: flush pending work, wait for the
// GPU to finish, destroy the drawing manager, then release cached resources.
GrDirectContext::~GrDirectContext() {
    ASSERT_SINGLE_OWNER
    // this if-test protects against the case where the context is being destroyed
    // before having been fully created
    if (fGpu) {
        this->flushAndSubmit();
    }

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/false);

    this->destroyDrawingManager();

    // Ideally we could just let the ptr drop, but resource cache queries this ptr in releaseAll.
    if (fResourceCache) {
        fResourceCache->releaseAll();
    }
    // This has to be after GrResourceCache::releaseAll so that other threads that are holding
    // async pixel result don't try to destroy buffers off thread.
    fMappedBufferManager.reset();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500102
// Exposes the base-class thread-safe proxy accessor as part of the public API.
sk_sp<GrContextThreadSafeProxy> GrDirectContext::threadSafeProxy() {
    return INHERITED::threadSafeProxy();
}
106
Adlai Hollera7a40442020-10-09 09:49:42 -0400107void GrDirectContext::resetGLTextureBindings() {
108 if (this->abandoned() || this->backend() != GrBackendApi::kOpenGL) {
109 return;
110 }
111 fGpu->resetTextureBindings();
112}
113
// Marks GPU state categories (bitmask in `state`) as dirty so cached 3D API
// state is re-sent before the next use.
void GrDirectContext::resetContext(uint32_t state) {
    ASSERT_SINGLE_OWNER
    fGpu->markContextDirty(state);
}
118
// Disconnects the context from the backend 3D API without freeing resources in
// that API. Ordering is deliberate: sync GPU first, then abandon caches, then
// disconnect the GrGpu.
void GrDirectContext::abandonContext() {
    // Idempotent: a second call is a no-op.
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(this->caps()->mustSyncGpuDuringAbandon());

    fStrikeCache->freeAll();

    fMappedBufferManager->abandon();

    fResourceProvider->abandon();

    // abandon first so destructors don't try to free the resources in the API.
    fResourceCache->abandonAll();

    fGpu->disconnect(GrGpu::DisconnectType::kAbandon);

    // Must be after GrResourceCache::abandonAll().
    fMappedBufferManager.reset();

    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500148
Adlai Hollera7a40442020-10-09 09:49:42 -0400149bool GrDirectContext::abandoned() {
150 if (INHERITED::abandoned()) {
151 return true;
152 }
153
154 if (fGpu && fGpu->isDeviceLost()) {
155 this->abandonContext();
156 return true;
157 }
158 return false;
159}
160
Adlai Holler61a591c2020-10-12 12:38:33 -0400161bool GrDirectContext::oomed() { return fGpu ? fGpu->checkAndResetOOMed() : false; }
162
// Like abandonContext(), but additionally releases all resources back to the
// backend 3D API (kCleanup disconnect instead of kAbandon).
void GrDirectContext::releaseResourcesAndAbandonContext() {
    // Idempotent: a second call is a no-op.
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/true);

    fResourceProvider->abandon();

    // Release all resources in the backend 3D API.
    fResourceCache->releaseAll();

    // Must be after GrResourceCache::releaseAll().
    fMappedBufferManager.reset();

    fGpu->disconnect(GrGpu::DisconnectType::kCleanup);
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillips6db27c22019-05-01 10:43:56 -0400187
// Flushes pending work, then frees every unlocked GPU resource the context
// holds (atlases, glyph cache, drawing-manager resources, resource cache).
void GrDirectContext::freeGpuResources() {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    // Outstanding work must land before its resources can be freed.
    this->flushAndSubmit();
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();

    // TODO: the glyph cache doesn't hold any GpuResources so this call should not be needed here.
    // Some slack in the GrTextBlob's implementation requires it though. That could be fixed.
    fStrikeCache->freeAll();

    this->drawingManager()->freeGpuResources();

    fResourceCache->purgeUnlockedResources();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500209
// Second-phase initialization: wires the caps into the thread-safe proxy and
// constructs the caches/providers that depend on a live GrGpu. Returns false
// if no GPU was created or base-class init fails.
bool GrDirectContext::init() {
    ASSERT_SINGLE_OWNER
    if (!fGpu) {
        return false;
    }

    // The proxy must know the caps before INHERITED::init() runs.
    fThreadSafeProxy->priv().init(fGpu->refCaps(), fGpu->refPipelineBuilder());
    if (!INHERITED::init()) {
        return false;
    }

    SkASSERT(this->getTextBlobCache());
    SkASSERT(this->threadSafeCache());

    fStrikeCache = std::make_unique<GrStrikeCache>();
    fResourceCache = std::make_unique<GrResourceCache>(this->singleOwner(),
                                                       this->directContextID(),
                                                       this->contextID());
    fResourceCache->setProxyProvider(this->proxyProvider());
    fResourceCache->setThreadSafeCache(this->threadSafeCache());
#if GR_TEST_UTILS
    // Tests may override the budget; -1 means "no override".
    if (this->options().fResourceCacheLimitOverride != -1) {
        this->setResourceCacheLimit(this->options().fResourceCacheLimitOverride);
    }
#endif
    fResourceProvider = std::make_unique<GrResourceProvider>(fGpu.get(), fResourceCache.get(),
                                                             this->singleOwner());
    fMappedBufferManager = std::make_unique<GrClientMappedBufferManager>(this->directContextID());

    fDidTestPMConversions = false;

    // DDL TODO: we need to think through how the task group & persistent cache
    // get passed on to/shared between all the DDLRecorders created with this context.
    if (this->options().fExecutor) {
        fTaskGroup = std::make_unique<SkTaskGroup>(*this->options().fExecutor);
    }

    fPersistentCache = this->options().fPersistentCache;

    GrDrawOpAtlas::AllowMultitexturing allowMultitexturing;
    if (GrContextOptions::Enable::kNo == this->options().fAllowMultipleGlyphCacheTextures ||
        // multitexturing supported only if range can represent the index + texcoords fully
        !(this->caps()->shaderCaps()->floatIs32Bits() ||
          this->caps()->shaderCaps()->integerSupport())) {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kNo;
    } else {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kYes;
    }

    GrProxyProvider* proxyProvider = this->priv().proxyProvider();

    fAtlasManager = std::make_unique<GrAtlasManager>(proxyProvider,
                                                     this->options().fGlyphCacheTextureMaximumBytes,
                                                     allowMultitexturing);
    // The atlas manager must be flushed before any surface it feeds.
    this->priv().addOnFlushCallbackObject(fAtlasManager.get());

    return true;
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500268
Adlai Holler3a508e92020-10-12 13:58:01 -0400269void GrDirectContext::getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const {
270 ASSERT_SINGLE_OWNER
271
272 if (resourceCount) {
273 *resourceCount = fResourceCache->getBudgetedResourceCount();
274 }
275 if (resourceBytes) {
276 *resourceBytes = fResourceCache->getBudgetedResourceBytes();
277 }
278}
279
// Returns the number of bytes held by resources that could be purged right now.
size_t GrDirectContext::getResourceCachePurgeableBytes() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getPurgeableBytes();
}
284
285void GrDirectContext::getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const {
286 ASSERT_SINGLE_OWNER
287 if (maxResources) {
288 *maxResources = -1;
289 }
290 if (maxResourceBytes) {
291 *maxResourceBytes = this->getResourceCacheLimit();
292 }
293}
294
// Returns the byte budget of the resource cache.
size_t GrDirectContext::getResourceCacheLimit() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getMaxResourceBytes();
}

// Legacy two-argument setter; the resource-count limit (`unused`) is ignored.
void GrDirectContext::setResourceCacheLimits(int unused, size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    this->setResourceCacheLimit(maxResourceBytes);
}

// Sets the byte budget of the resource cache.
void GrDirectContext::setResourceCacheLimit(size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    fResourceCache->setLimit(maxResourceBytes);
}
309
// Purges unlocked resources from the cache (optionally only scratch ones),
// then sweeps the text-blob cache and backend objects.
void GrDirectContext::purgeUnlockedResources(bool scratchResourcesOnly) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(scratchResourcesOnly);
    fResourceCache->purgeAsNeeded();

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();

    fGpu->releaseUnlockedBackendObjects();
}
326
// Frees resources that have sat unused for at least `msNotUsed` milliseconds.
// Also processes completed async-readback buffers before purging.
void GrDirectContext::performDeferredCleanup(std::chrono::milliseconds msNotUsed,
                                             bool scratchResourcesOnly) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->checkAsyncWorkCompletion();
    fMappedBufferManager->process();
    // Anything last used before this instant is eligible for purging.
    auto purgeTime = GrStdSteadyClock::now() - msNotUsed;

    fResourceCache->purgeAsNeeded();
    fResourceCache->purgeResourcesNotUsedSince(purgeTime, scratchResourcesOnly);

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
348
// Purges unlocked resources until roughly `bytesToPurge` bytes are freed,
// preferring scratch resources when requested.
void GrDirectContext::purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(bytesToPurge, preferScratchResources);
}
358
Adlai Holler3acc69a2020-10-13 08:20:51 -0400359////////////////////////////////////////////////////////////////////////////////
360bool GrDirectContext::wait(int numSemaphores, const GrBackendSemaphore waitSemaphores[],
361 bool deleteSemaphoresAfterWait) {
Greg Daniel063fdce2021-05-06 19:45:55 +0000362 if (!fGpu || !fGpu->caps()->semaphoreSupport()) {
Adlai Holler3acc69a2020-10-13 08:20:51 -0400363 return false;
364 }
365 GrWrapOwnership ownership =
366 deleteSemaphoresAfterWait ? kAdopt_GrWrapOwnership : kBorrow_GrWrapOwnership;
367 for (int i = 0; i < numSemaphores; ++i) {
368 std::unique_ptr<GrSemaphore> sema = fResourceProvider->wrapBackendSemaphore(
Robert Phillips1a82a4e2021-07-01 10:27:44 -0400369 waitSemaphores[i], GrSemaphoreWrapType::kWillWait, ownership);
Adlai Holler3acc69a2020-10-13 08:20:51 -0400370 // If we failed to wrap the semaphore it means the client didn't give us a valid semaphore
371 // to begin with. Therefore, it is fine to not wait on it.
372 if (sema) {
373 fGpu->waitSemaphore(sema.get());
374 }
375 }
376 return true;
377}
Adlai Holler4aa4c602020-10-12 13:58:52 -0400378
// Lazily creates the small-path atlas manager (v1 builds only; in v2 builds
// the body compiles away and the never-set member is returned).
skgpu::v1::SmallPathAtlasMgr* GrDirectContext::onGetSmallPathAtlasMgr() {
#if SK_GPU_V1
    if (!fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr = std::make_unique<skgpu::v1::SmallPathAtlasMgr>();

        this->priv().addOnFlushCallbackObject(fSmallPathAtlasMgr.get());
    }

    // The atlas itself may fail to initialize (e.g. allocation failure).
    if (!fSmallPathAtlasMgr->initAtlas(this->proxyProvider(), this->caps())) {
        return nullptr;
    }
#endif

    return fSmallPathAtlasMgr.get();
}
394
Adlai Holler3acc69a2020-10-13 08:20:51 -0400395////////////////////////////////////////////////////////////////////////////////
396
// Flushes all work to the GPU. On an abandoned context the client's finished/
// submitted callbacks are still invoked (submitted with success=false) so the
// caller never leaks its callback contexts.
GrSemaphoresSubmitted GrDirectContext::flush(const GrFlushInfo& info) {
    ASSERT_SINGLE_OWNER
    if (this->abandoned()) {
        if (info.fFinishedProc) {
            info.fFinishedProc(info.fFinishedContext);
        }
        if (info.fSubmittedProc) {
            info.fSubmittedProc(info.fSubmittedContext, false);
        }
        return GrSemaphoresSubmitted::kNo;
    }

    return this->drawingManager()->flushSurfaces({}, SkSurface::BackendSurfaceAccess::kNoAccess,
                                                 info, nullptr);
}
412
413bool GrDirectContext::submit(bool syncCpu) {
414 ASSERT_SINGLE_OWNER
415 if (this->abandoned()) {
416 return false;
417 }
418
419 if (!fGpu) {
420 return false;
421 }
422
423 return fGpu->submitToGpu(syncCpu);
424}
425
426////////////////////////////////////////////////////////////////////////////////
427
// Polls the GPU for finished work and fires any pending finished-procs.
void GrDirectContext::checkAsyncWorkCompletion() {
    if (fGpu) {
        fGpu->checkFinishProcs();
    }
}

// Blocks until all outstanding GPU work completes, then drains finished-procs.
// When the context is abandoned this only runs if the caller explicitly opts in
// via shouldExecuteWhileAbandoned.
void GrDirectContext::syncAllOutstandingGpuWork(bool shouldExecuteWhileAbandoned) {
    if (fGpu && (!this->abandoned() || shouldExecuteWhileAbandoned)) {
        fGpu->finishOutstandingGpuWork();
        this->checkAsyncWorkCompletion();
    }
}
440
Adlai Holler3acc69a2020-10-13 08:20:51 -0400441////////////////////////////////////////////////////////////////////////////////
442
// Asks the GPU to persist its Vulkan pipeline cache (no-op on other backends).
void GrDirectContext::storeVkPipelineCacheData() {
    if (fGpu) {
        fGpu->storeVkPipelineCacheData();
    }
}
448
449////////////////////////////////////////////////////////////////////////////////
450
// True when the shader caps allow distance-field text rendering.
bool GrDirectContext::supportsDistanceFieldText() const {
    return this->caps()->shaderCaps()->supportsDistanceFieldText();
}
454
455//////////////////////////////////////////////////////////////////////////////
456
// Dumps resource-cache and text-blob-cache memory usage into the client's
// SkTraceMemoryDump for memory-infra style reporting.
void GrDirectContext::dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const {
    ASSERT_SINGLE_OWNER
    fResourceCache->dumpMemoryStatistics(traceMemoryDump);
    traceMemoryDump->dumpNumericValue("skia/gr_text_blob_cache", "size", "bytes",
                                      this->getTextBlobCache()->usedBytes());
}
463
// Creates an uninitialized backend texture with an explicit backend format.
// Returns an invalid GrBackendTexture if the context is abandoned.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    return fGpu->createBackendTexture({width, height}, backendFormat, renderable,
                                      mipMapped, isProtected);
}
477
// Creates an uninitialized backend texture, deriving the backend format from
// the given SkColorType. Delegates to the explicit-format overload.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    const GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);

    return this->createBackendTexture(width, height, format, mipMapped, renderable, isProtected);
}
491
Brian Salomon71283232021-04-08 12:45:58 -0400492static GrBackendTexture create_and_clear_backend_texture(GrDirectContext* dContext,
493 SkISize dimensions,
494 const GrBackendFormat& backendFormat,
495 GrMipmapped mipMapped,
496 GrRenderable renderable,
497 GrProtected isProtected,
498 sk_sp<GrRefCntedCallback> finishedCallback,
499 std::array<float, 4> color) {
Adlai Holler98dd0042020-10-13 10:04:00 -0400500 GrGpu* gpu = dContext->priv().getGpu();
Adlai Holler98dd0042020-10-13 10:04:00 -0400501 GrBackendTexture beTex = gpu->createBackendTexture(dimensions, backendFormat, renderable,
502 mipMapped, isProtected);
503 if (!beTex.isValid()) {
504 return {};
505 }
506
Brian Salomon71283232021-04-08 12:45:58 -0400507 if (!dContext->priv().getGpu()->clearBackendTexture(beTex,
508 std::move(finishedCallback),
509 color)) {
Adlai Holler98dd0042020-10-13 10:04:00 -0400510 dContext->deleteBackendTexture(beTex);
511 return {};
512 }
513 return beTex;
514}
515
// Uploads `numLevels` mip levels of pixel data into an existing backend
// texture by wrapping it in a borrowed proxy, writing through a
// SurfaceContext, and flushing. Returns false if the pixmap color type is
// incompatible with the texture's format, the wrap fails, or the write fails.
static bool update_texture_with_pixmaps(GrDirectContext* context,
                                        const SkPixmap src[],
                                        int numLevels,
                                        const GrBackendTexture& backendTexture,
                                        GrSurfaceOrigin textureOrigin,
                                        sk_sp<GrRefCntedCallback> finishedCallback) {
    GrColorType ct = SkColorTypeToGrColorType(src[0].colorType());
    const GrBackendFormat& format = backendTexture.getBackendFormat();

    if (!context->priv().caps()->areColorTypeAndFormatCompatible(ct, format)) {
        return false;
    }

    // Borrowed wrap: the client keeps ownership; the callback fires when the
    // GPU is done reading the staged data.
    auto proxy = context->priv().proxyProvider()->wrapBackendTexture(backendTexture,
                                                                     kBorrow_GrWrapOwnership,
                                                                     GrWrapCacheable::kNo,
                                                                     kRW_GrIOType,
                                                                     std::move(finishedCallback));
    if (!proxy) {
        return false;
    }

    GrSwizzle swizzle = context->priv().caps()->getReadSwizzle(format, ct);
    GrSurfaceProxyView view(std::move(proxy), textureOrigin, swizzle);
    skgpu::SurfaceContext surfaceContext(context, std::move(view), src[0].info().colorInfo());
    SkAutoSTArray<15, GrCPixmap> tmpSrc(numLevels);
    for (int i = 0; i < numLevels; ++i) {
        tmpSrc[i] = src[i];
    }
    if (!surfaceContext.writePixels(context, tmpSrc.get(), numLevels)) {
        return false;
    }

    // Flush immediately so the upload is recorded before the proxy goes away.
    GrSurfaceProxy* p = surfaceContext.asSurfaceProxy();
    GrFlushInfo info;
    context->priv().drawingManager()->flushSurfaces({&p, 1},
                                                    SkSurface::BackendSurfaceAccess::kNoAccess,
                                                    info,
                                                    nullptr);
    return true;
}
557
// Creates a backend texture with an explicit format, cleared to `color`.
// The finished-proc is wrapped first so it fires even on the abandoned path
// (when the callback ref-count drops).
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return {};
    }

    return create_and_clear_backend_texture(this,
                                            {width, height},
                                            backendFormat,
                                            mipMapped,
                                            renderable,
                                            isProtected,
                                            std::move(finishedCallback),
                                            color.array());
}
582
// Creates a backend texture for an SkColorType, cleared to `color`. The color
// is pre-swizzled to match the backend format's channel order before clearing.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
    if (!format.isValid()) {
        return {};
    }

    GrColorType grColorType = SkColorTypeToGrColorType(skColorType);
    SkColor4f swizzledColor = this->caps()->getWriteSwizzle(format, grColorType).applyTo(color);

    return create_and_clear_backend_texture(this,
                                            {width, height},
                                            format,
                                            mipMapped,
                                            renderable,
                                            isProtected,
                                            std::move(finishedCallback),
                                            swizzledColor.array());
}
614
// Creates a backend texture initialized from the provided pixmap levels.
// Level 0 determines size/color type; more than one level implies a mipmapped
// texture. A texture whose upload fails is deleted rather than leaked.
GrBackendTexture GrDirectContext::createBackendTexture(const SkPixmap srcData[],
                                                       int numProvidedLevels,
                                                       GrSurfaceOrigin textureOrigin,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    if (!srcData || numProvidedLevels <= 0) {
        return {};
    }

    SkColorType colorType = srcData[0].colorType();

    GrMipmapped mipMapped = GrMipmapped::kNo;
    if (numProvidedLevels > 1) {
        mipMapped = GrMipmapped::kYes;
    }

    GrBackendFormat backendFormat = this->defaultBackendFormat(colorType, renderable);
    GrBackendTexture beTex = this->createBackendTexture(srcData[0].width(),
                                                        srcData[0].height(),
                                                        backendFormat,
                                                        mipMapped,
                                                        renderable,
                                                        isProtected);
    if (!beTex.isValid()) {
        return {};
    }
    if (!update_texture_with_pixmaps(this,
                                     srcData,
                                     numProvidedLevels,
                                     beTex,
                                     textureOrigin,
                                     std::move(finishedCallback))) {
        // Don't leak a texture we can't fill.
        this->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
662
// Overwrites an existing backend texture with a solid color (no swizzle — the
// raw color array is handed to the GPU as-is).
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    return fGpu->clearBackendTexture(backendTexture, std::move(finishedCallback), color.array());
}
675
// Overwrites an existing backend texture with a solid color, first validating
// the SkColorType against the texture's format and swizzling the color into
// the format's channel order.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           SkColorType skColorType,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrBackendFormat format = backendTexture.getBackendFormat();
    GrColorType grColorType = SkColorTypeToGrColorType(skColorType);

    if (!this->caps()->areColorTypeAndFormatCompatible(grColorType, format)) {
        return false;
    }

    GrSwizzle swizzle = this->caps()->getWriteSwizzle(format, grColorType);
    SkColor4f swizzledColor = swizzle.applyTo(color);

    return fGpu->clearBackendTexture(backendTexture,
                                     std::move(finishedCallback),
                                     swizzledColor.array());
}
701
// Overwrites an existing backend texture with pixmap data. A mipmapped
// texture must have its entire level chain provided; a non-mipmapped texture
// takes exactly one level.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkPixmap srcData[],
                                           int numLevels,
                                           GrSurfaceOrigin textureOrigin,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    if (!srcData || numLevels <= 0) {
        return false;
    }

    // If the texture has MIP levels then we require that the full set is overwritten.
    int numExpectedLevels = 1;
    if (backendTexture.hasMipmaps()) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(backendTexture.width(),
                                                        backendTexture.height()) + 1;
    }
    if (numLevels != numExpectedLevels) {
        return false;
    }
    return update_texture_with_pixmaps(this,
                                       srcData,
                                       numLevels,
                                       backendTexture,
                                       textureOrigin,
                                       std::move(finishedCallback));
}
734
Adlai Holler64e13832020-10-13 08:21:56 -0400735//////////////////////////////////////////////////////////////////////////////
736
737static GrBackendTexture create_and_update_compressed_backend_texture(
738 GrDirectContext* dContext,
739 SkISize dimensions,
740 const GrBackendFormat& backendFormat,
741 GrMipmapped mipMapped,
742 GrProtected isProtected,
743 sk_sp<GrRefCntedCallback> finishedCallback,
Brian Salomon71283232021-04-08 12:45:58 -0400744 const void* data,
745 size_t size) {
Adlai Holler64e13832020-10-13 08:21:56 -0400746 GrGpu* gpu = dContext->priv().getGpu();
747
748 GrBackendTexture beTex = gpu->createCompressedBackendTexture(dimensions, backendFormat,
749 mipMapped, isProtected);
750 if (!beTex.isValid()) {
751 return {};
752 }
753
754 if (!dContext->priv().getGpu()->updateCompressedBackendTexture(
Brian Salomon71283232021-04-08 12:45:58 -0400755 beTex, std::move(finishedCallback), data, size)) {
Adlai Holler64e13832020-10-13 08:21:56 -0400756 dContext->deleteBackendTexture(beTex);
757 return {};
758 }
759 return beTex;
760}
761
Brian Salomon71283232021-04-08 12:45:58 -0400762GrBackendTexture GrDirectContext::createCompressedBackendTexture(
763 int width, int height,
764 const GrBackendFormat& backendFormat,
765 const SkColor4f& color,
766 GrMipmapped mipmapped,
767 GrProtected isProtected,
768 GrGpuFinishedProc finishedProc,
769 GrGpuFinishedContext finishedContext) {
Adlai Holler64e13832020-10-13 08:21:56 -0400770 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
Brian Salomon694ff172020-11-04 16:54:28 -0500771 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400772
773 if (this->abandoned()) {
774 return {};
775 }
776
Brian Salomon71283232021-04-08 12:45:58 -0400777 SkImage::CompressionType compression = GrBackendFormatToCompressionType(backendFormat);
778 if (compression == SkImage::CompressionType::kNone) {
779 return {};
780 }
781
782 size_t size = SkCompressedDataSize(compression,
783 {width, height},
784 nullptr,
785 mipmapped == GrMipmapped::kYes);
786 auto storage = std::make_unique<char[]>(size);
787 GrFillInCompressedData(compression, {width, height}, mipmapped, storage.get(), color);
788 return create_and_update_compressed_backend_texture(this,
789 {width, height},
790 backendFormat,
791 mipmapped,
792 isProtected,
793 std::move(finishedCallback),
794 storage.get(),
795 size);
Adlai Holler64e13832020-10-13 08:21:56 -0400796}
797
Brian Salomon71283232021-04-08 12:45:58 -0400798GrBackendTexture GrDirectContext::createCompressedBackendTexture(
799 int width, int height,
800 SkImage::CompressionType compression,
801 const SkColor4f& color,
802 GrMipmapped mipMapped,
803 GrProtected isProtected,
804 GrGpuFinishedProc finishedProc,
805 GrGpuFinishedContext finishedContext) {
Adlai Holler64e13832020-10-13 08:21:56 -0400806 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
807 GrBackendFormat format = this->compressedBackendFormat(compression);
808 return this->createCompressedBackendTexture(width, height, format, color,
809 mipMapped, isProtected, finishedProc,
810 finishedContext);
811}
812
Brian Salomon71283232021-04-08 12:45:58 -0400813GrBackendTexture GrDirectContext::createCompressedBackendTexture(
814 int width, int height,
815 const GrBackendFormat& backendFormat,
816 const void* compressedData,
817 size_t dataSize,
818 GrMipmapped mipMapped,
819 GrProtected isProtected,
820 GrGpuFinishedProc finishedProc,
821 GrGpuFinishedContext finishedContext) {
Adlai Holler64e13832020-10-13 08:21:56 -0400822 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
Brian Salomon694ff172020-11-04 16:54:28 -0500823 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400824
825 if (this->abandoned()) {
826 return {};
827 }
828
Brian Salomon71283232021-04-08 12:45:58 -0400829 return create_and_update_compressed_backend_texture(this,
830 {width, height},
831 backendFormat,
832 mipMapped,
833 isProtected,
834 std::move(finishedCallback),
835 compressedData,
836 dataSize);
Adlai Holler64e13832020-10-13 08:21:56 -0400837}
838
Brian Salomon71283232021-04-08 12:45:58 -0400839GrBackendTexture GrDirectContext::createCompressedBackendTexture(
840 int width, int height,
841 SkImage::CompressionType compression,
842 const void* data, size_t dataSize,
843 GrMipmapped mipMapped,
844 GrProtected isProtected,
845 GrGpuFinishedProc finishedProc,
846 GrGpuFinishedContext finishedContext) {
Adlai Holler64e13832020-10-13 08:21:56 -0400847 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
848 GrBackendFormat format = this->compressedBackendFormat(compression);
849 return this->createCompressedBackendTexture(width, height, format, data, dataSize, mipMapped,
850 isProtected, finishedProc, finishedContext);
851}
852
853bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
854 const SkColor4f& color,
855 GrGpuFinishedProc finishedProc,
856 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500857 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400858
859 if (this->abandoned()) {
860 return false;
861 }
862
Brian Salomon71283232021-04-08 12:45:58 -0400863 SkImage::CompressionType compression =
864 GrBackendFormatToCompressionType(backendTexture.getBackendFormat());
865 if (compression == SkImage::CompressionType::kNone) {
866 return {};
867 }
868 size_t size = SkCompressedDataSize(compression,
869 backendTexture.dimensions(),
870 nullptr,
871 backendTexture.hasMipmaps());
872 SkAutoMalloc storage(size);
873 GrFillInCompressedData(compression,
874 backendTexture.dimensions(),
875 backendTexture.mipmapped(),
876 static_cast<char*>(storage.get()),
877 color);
878 return fGpu->updateCompressedBackendTexture(backendTexture,
879 std::move(finishedCallback),
880 storage.get(),
881 size);
Adlai Holler64e13832020-10-13 08:21:56 -0400882}
883
884bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
885 const void* compressedData,
886 size_t dataSize,
887 GrGpuFinishedProc finishedProc,
888 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500889 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400890
891 if (this->abandoned()) {
892 return false;
893 }
894
895 if (!compressedData) {
896 return false;
897 }
898
Brian Salomon71283232021-04-08 12:45:58 -0400899 return fGpu->updateCompressedBackendTexture(backendTexture,
900 std::move(finishedCallback),
901 compressedData,
902 dataSize);
Adlai Holler64e13832020-10-13 08:21:56 -0400903}
904
Adlai Holler6d0745b2020-10-13 13:29:00 -0400905//////////////////////////////////////////////////////////////////////////////
906
907bool GrDirectContext::setBackendTextureState(const GrBackendTexture& backendTexture,
908 const GrBackendSurfaceMutableState& state,
909 GrBackendSurfaceMutableState* previousState,
910 GrGpuFinishedProc finishedProc,
911 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500912 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400913
914 if (this->abandoned()) {
915 return false;
916 }
917
918 return fGpu->setBackendTextureState(backendTexture, state, previousState, std::move(callback));
919}
920
921
922bool GrDirectContext::setBackendRenderTargetState(const GrBackendRenderTarget& backendRenderTarget,
923 const GrBackendSurfaceMutableState& state,
924 GrBackendSurfaceMutableState* previousState,
925 GrGpuFinishedProc finishedProc,
926 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500927 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400928
929 if (this->abandoned()) {
930 return false;
931 }
932
933 return fGpu->setBackendRenderTargetState(backendRenderTarget, state, previousState,
934 std::move(callback));
935}
936
937void GrDirectContext::deleteBackendTexture(GrBackendTexture backendTex) {
938 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
939 // For the Vulkan backend we still must destroy the backend texture when the context is
940 // abandoned.
941 if ((this->abandoned() && this->backend() != GrBackendApi::kVulkan) || !backendTex.isValid()) {
942 return;
943 }
944
945 fGpu->deleteBackendTexture(backendTex);
946}
947
948//////////////////////////////////////////////////////////////////////////////
949
// Hands a previously harvested (key, data) shader pair to the backend so it can warm its
// program/pipeline cache; returns the backend's success result.
bool GrDirectContext::precompileShader(const SkData& key, const SkData& data) {
    return fGpu->precompileShader(key, data);
}
953
954#ifdef SK_ENABLE_DUMP_GPU
955#include "include/core/SkString.h"
956#include "src/utils/SkJSONWriter.h"
// Serializes the context's backend name, caps, GPU state, and context state into one
// pretty-printed, NUL-terminated JSON string. Debug/dump builds only (SK_ENABLE_DUMP_GPU).
SkString GrDirectContext::dump() const {
    SkDynamicMemoryWStream stream;
    SkJSONWriter writer(&stream, SkJSONWriter::Mode::kPretty);
    writer.beginObject();

    writer.appendString("backend", GrBackendApiToStr(this->backend()));

    writer.appendName("caps");
    this->caps()->dumpJSON(&writer);

    writer.appendName("gpu");
    this->fGpu->dumpJSON(&writer);

    writer.appendName("context");
    this->dumpJSON(&writer);

    // Flush JSON to the memory stream
    writer.endObject();
    writer.flush();

    // Null terminate the JSON data in the memory stream
    stream.write8(0);

    // Allocate a string big enough to hold all the data, then copy out of the stream
    SkString result(stream.bytesWritten());
    stream.copyToAndReset(result.writable_str());
    return result;
}
985#endif
986
John Rosascoa9b348f2019-11-08 13:18:15 -0800987#ifdef SK_GL
Robert Phillipsc7228c62020-07-14 12:57:39 -0400988
Robert Phillipsf4f80112020-07-13 16:13:31 -0400989/*************************************************************************************************/
990sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500991 GrContextOptions defaultOptions;
Jim Van Verth03b8ab22020-02-24 11:36:15 -0500992 return MakeGL(std::move(glInterface), defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -0500993}
994
Robert Phillipsf4f80112020-07-13 16:13:31 -0400995sk_sp<GrDirectContext> GrDirectContext::MakeGL(const GrContextOptions& options) {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400996 return MakeGL(nullptr, options);
997}
998
Robert Phillipsf4f80112020-07-13 16:13:31 -0400999sk_sp<GrDirectContext> GrDirectContext::MakeGL() {
Brian Salomonc1b9c102018-04-06 09:18:00 -04001000 GrContextOptions defaultOptions;
1001 return MakeGL(nullptr, defaultOptions);
1002}
1003
Brian Salomon24069eb2020-06-24 10:19:52 -04001004#if GR_TEST_UTILS
// Test-only: wraps 'original' (glGetError) so that roughly 1-in-300 successful calls
// report GR_GL_OUT_OF_MEMORY, exercising the driver-OOM handling paths.
// NOTE(review): this helper has external linkage; consider 'static' -- confirm no other
// TU references it.
GrGLFunction<GrGLGetErrorFn> make_get_error_with_random_oom(GrGLFunction<GrGLGetErrorFn> original) {
    // A SkRandom and a GrGLFunction<GrGLGetErrorFn> are too big to be captured by a
    // GrGLFunction<GrGLGetError> (surprise, surprise). So we make a context object and
    // capture that by pointer. However, GrGLFunction doesn't support calling a destructor
    // on the thing it captures. So we leak the context.
    struct GetErrorContext {
        SkRandom fRandom;
        GrGLFunction<GrGLGetErrorFn> fGetError;
    };

    auto errorContext = new GetErrorContext;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
    __lsan_ignore_object(errorContext);
#endif

    errorContext->fGetError = original;

    return GrGLFunction<GrGLGetErrorFn>([errorContext]() {
        // Forward to the real glGetError; only inject an OOM when the call succeeded,
        // so genuine errors are never masked.
        GrGLenum error = errorContext->fGetError();
        if (error == GR_GL_NO_ERROR && (errorContext->fRandom.nextU() % 300) == 0) {
            error = GR_GL_OUT_OF_MEMORY;
        }
        return error;
    });
}
1031#endif
1032
// Primary GL factory: builds the context shell, optionally wraps the interface's
// glGetError with a random-OOM injector (test builds), then creates the GrGLGpu and
// initializes the context. Returns null on init failure.
sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface,
                                               const GrContextOptions& options) {
    sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kOpenGL, options));
#if GR_TEST_UTILS
    if (options.fRandomGLOOM) {
        // NOTE(review): '*glInterface' dereferences the argument -- the null-interface
        // overloads combined with fRandomGLOOM would crash here; confirm that
        // combination is unsupported.
        auto copy = sk_make_sp<GrGLInterface>(*glInterface);
        copy->fFunctions.fGetError =
                make_get_error_with_random_oom(glInterface->fFunctions.fGetError);
#if GR_GL_CHECK_ERROR
        // Suppress logging GL errors since we'll be synthetically generating them.
        copy->suppressErrorLogging();
#endif
        glInterface = std::move(copy);
    }
#endif
    direct->fGpu = GrGLGpu::Make(std::move(glInterface), options, direct.get());
    if (!direct->init()) {
        return nullptr;
    }
    return direct;
}
John Rosascoa9b348f2019-11-08 13:18:15 -08001054#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001055
Robert Phillipsf4f80112020-07-13 16:13:31 -04001056/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001057sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions) {
1058 GrContextOptions defaultOptions;
1059 return MakeMock(mockOptions, defaultOptions);
1060}
1061
1062sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions,
1063 const GrContextOptions& options) {
1064 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMock, options));
1065
1066 direct->fGpu = GrMockGpu::Make(mockOptions, options, direct.get());
1067 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001068 return nullptr;
1069 }
Chris Daltona378b452019-12-11 13:24:11 -05001070
Robert Phillipsf4f80112020-07-13 16:13:31 -04001071 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001072}
1073
Greg Danielb4d89562018-10-03 18:44:49 +00001074#ifdef SK_VULKAN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001075/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001076sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext) {
1077 GrContextOptions defaultOptions;
1078 return MakeVulkan(backendContext, defaultOptions);
1079}
1080
1081sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext,
1082 const GrContextOptions& options) {
1083 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kVulkan, options));
1084
1085 direct->fGpu = GrVkGpu::Make(backendContext, options, direct.get());
1086 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001087 return nullptr;
1088 }
1089
Robert Phillipsf4f80112020-07-13 16:13:31 -04001090 return direct;
Greg Danielb4d89562018-10-03 18:44:49 +00001091}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001092#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001093
1094#ifdef SK_METAL
Robert Phillipsf4f80112020-07-13 16:13:31 -04001095/*************************************************************************************************/
Jim Van Verth351c9b52020-11-12 15:21:11 -05001096sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001097 GrContextOptions defaultOptions;
Jim Van Verth351c9b52020-11-12 15:21:11 -05001098 return MakeMetal(backendContext, defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -05001099}
1100
Jim Van Verth351c9b52020-11-12 15:21:11 -05001101sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext,
1102 const GrContextOptions& options) {
Robert Phillipsf4f80112020-07-13 16:13:31 -04001103 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
Robert Phillipsa3457b82018-03-08 11:30:12 -05001104
Jim Van Verth351c9b52020-11-12 15:21:11 -05001105 direct->fGpu = GrMtlTrampoline::MakeGpu(backendContext, options, direct.get());
Robert Phillipsf4f80112020-07-13 16:13:31 -04001106 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001107 return nullptr;
1108 }
Timothy Liang4e85e802018-06-28 16:37:18 -04001109
Robert Phillipsf4f80112020-07-13 16:13:31 -04001110 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001111}
Jim Van Verth351c9b52020-11-12 15:21:11 -05001112
1113// deprecated
1114sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue) {
1115 GrContextOptions defaultOptions;
1116 return MakeMetal(device, queue, defaultOptions);
1117}
1118
1119// deprecated
1120// remove include/gpu/mtl/GrMtlBackendContext.h, above, when removed
1121sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue,
1122 const GrContextOptions& options) {
1123 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
1124 GrMtlBackendContext backendContext = {};
1125 backendContext.fDevice.reset(device);
1126 backendContext.fQueue.reset(queue);
1127
1128 return GrDirectContext::MakeMetal(backendContext, options);
1129}
Robert Phillipsa3457b82018-03-08 11:30:12 -05001130#endif
1131
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001132#ifdef SK_DIRECT3D
Robert Phillipsf4f80112020-07-13 16:13:31 -04001133/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001134sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext) {
1135 GrContextOptions defaultOptions;
1136 return MakeDirect3D(backendContext, defaultOptions);
1137}
1138
1139sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext,
1140 const GrContextOptions& options) {
1141 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDirect3D, options));
1142
1143 direct->fGpu = GrD3DGpu::Make(backendContext, options, direct.get());
1144 if (!direct->init()) {
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -05001145 return nullptr;
1146 }
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001147
Robert Phillipsf4f80112020-07-13 16:13:31 -04001148 return direct;
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001149}
1150#endif
1151
Stephen White985741a2019-07-18 11:43:45 -04001152#ifdef SK_DAWN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001153/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001154sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device) {
Stephen White985741a2019-07-18 11:43:45 -04001155 GrContextOptions defaultOptions;
1156 return MakeDawn(device, defaultOptions);
1157}
1158
Robert Phillipsf4f80112020-07-13 16:13:31 -04001159sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device,
1160 const GrContextOptions& options) {
1161 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDawn, options));
Stephen White985741a2019-07-18 11:43:45 -04001162
Robert Phillipsf4f80112020-07-13 16:13:31 -04001163 direct->fGpu = GrDawnGpu::Make(device, options, direct.get());
1164 if (!direct->init()) {
Stephen White985741a2019-07-18 11:43:45 -04001165 return nullptr;
1166 }
1167
Robert Phillipsf4f80112020-07-13 16:13:31 -04001168 return direct;
Stephen White985741a2019-07-18 11:43:45 -04001169}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001170
Stephen White985741a2019-07-18 11:43:45 -04001171#endif