blob: dd287b82f21857a9c04ccbf52314cd879155a5ee [file] [log] [blame]
Robert Phillipsa3457b82018-03-08 11:30:12 -05001/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Greg Daniel54bfb182018-11-20 17:12:36 -05008
Robert Phillipsb7bfbc22020-07-01 12:55:01 -04009#include "include/gpu/GrDirectContext.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050010
Adlai Holler3acc69a2020-10-13 08:20:51 -040011#include "include/core/SkTraceMemoryDump.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050012#include "include/gpu/GrContextThreadSafeProxy.h"
Brian Salomon71283232021-04-08 12:45:58 -040013#include "src/core/SkAutoMalloc.h"
Adlai Holler9555f292020-10-09 09:41:14 -040014#include "src/core/SkTaskGroup.h"
Robert Phillips06273bc2021-08-11 15:43:50 -040015#include "src/core/SkTraceEvent.h"
Brian Salomon71283232021-04-08 12:45:58 -040016#include "src/gpu/GrBackendUtils.h"
Adlai Holler9555f292020-10-09 09:41:14 -040017#include "src/gpu/GrClientMappedBufferManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/gpu/GrContextThreadSafeProxyPriv.h"
Adlai Hollera0693042020-10-14 11:23:11 -040019#include "src/gpu/GrDirectContextPriv.h"
Adlai Holler4aa4c602020-10-12 13:58:52 -040020#include "src/gpu/GrDrawingManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050021#include "src/gpu/GrGpu.h"
Adlai Holler9555f292020-10-09 09:41:14 -040022#include "src/gpu/GrResourceProvider.h"
23#include "src/gpu/GrShaderUtils.h"
Robert Phillips53eaa642021-08-10 13:49:51 -040024#include "src/gpu/SurfaceContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050025#include "src/gpu/effects/GrSkSLFP.h"
26#include "src/gpu/gl/GrGLGpu.h"
27#include "src/gpu/mock/GrMockGpu.h"
Robert Phillips5edf5102020-08-10 16:30:36 -040028#include "src/gpu/ops/GrSmallPathAtlasMgr.h"
Robert Phillipse19babf2020-04-06 13:57:30 -040029#include "src/gpu/text/GrAtlasManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050030#include "src/gpu/text/GrStrikeCache.h"
Brian Salomonea1d39b2021-04-01 17:06:52 -040031#include "src/image/SkImage_GpuBase.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050032#ifdef SK_METAL
Jim Van Verth351c9b52020-11-12 15:21:11 -050033#include "include/gpu/mtl/GrMtlBackendContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050034#include "src/gpu/mtl/GrMtlTrampoline.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050035#endif
36#ifdef SK_VULKAN
Mike Kleinc0bd9f92019-04-23 12:05:21 -050037#include "src/gpu/vk/GrVkGpu.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050038#endif
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -050039#ifdef SK_DIRECT3D
40#include "src/gpu/d3d/GrD3DGpu.h"
41#endif
Stephen White985741a2019-07-18 11:43:45 -040042#ifdef SK_DAWN
Mike Klein52337de2019-07-25 09:00:52 -050043#include "src/gpu/dawn/GrDawnGpu.h"
Stephen White985741a2019-07-18 11:43:45 -040044#endif
Adlai Holler6d0745b2020-10-13 13:29:00 -040045#include <memory>
Robert Phillipsa3457b82018-03-08 11:30:12 -050046
Brian Salomon24069eb2020-06-24 10:19:52 -040047#if GR_TEST_UTILS
48# include "include/utils/SkRandom.h"
49# if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
50# include <sanitizer/lsan_interface.h>
51# endif
52#endif
53
Adlai Holler9555f292020-10-09 09:41:14 -040054#define ASSERT_SINGLE_OWNER GR_ASSERT_SINGLE_OWNER(this->singleOwner())
55
Robert Phillipse7a959d2021-03-11 14:44:42 -050056GrDirectContext::DirectContextID GrDirectContext::DirectContextID::Next() {
Robert Phillipsedff4672021-03-11 09:16:25 -050057 static std::atomic<uint32_t> nextID{1};
58 uint32_t id;
59 do {
60 id = nextID.fetch_add(1, std::memory_order_relaxed);
61 } while (id == SK_InvalidUniqueID);
62 return DirectContextID(id);
63}
64
Robert Phillipsad248452020-06-30 09:27:52 -040065GrDirectContext::GrDirectContext(GrBackendApi backend, const GrContextOptions& options)
Robert Phillips23070582021-03-31 17:04:48 -040066 : INHERITED(GrContextThreadSafeProxyPriv::Make(backend, options), false)
Robert Phillipse7a959d2021-03-11 14:44:42 -050067 , fDirectContextID(DirectContextID::Next()) {
Robert Phillipsad248452020-06-30 09:27:52 -040068}
Robert Phillipsa3457b82018-03-08 11:30:12 -050069
Robert Phillipsad248452020-06-30 09:27:52 -040070GrDirectContext::~GrDirectContext() {
Adlai Holler9555f292020-10-09 09:41:14 -040071 ASSERT_SINGLE_OWNER
Robert Phillipsad248452020-06-30 09:27:52 -040072 // this if-test protects against the case where the context is being destroyed
73 // before having been fully created
Adlai Holler9555f292020-10-09 09:41:14 -040074 if (fGpu) {
Greg Daniel0a2464f2020-05-14 15:45:44 -040075 this->flushAndSubmit();
Robert Phillipsa3457b82018-03-08 11:30:12 -050076 }
Adlai Holler9555f292020-10-09 09:41:14 -040077
Greg Daniela89b4302021-01-29 10:48:40 -050078 // We need to make sure all work is finished on the gpu before we start releasing resources.
79 this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/false);
80
Adlai Holler9555f292020-10-09 09:41:14 -040081 this->destroyDrawingManager();
Adlai Holler9555f292020-10-09 09:41:14 -040082
83 // Ideally we could just let the ptr drop, but resource cache queries this ptr in releaseAll.
84 if (fResourceCache) {
85 fResourceCache->releaseAll();
86 }
Brian Salomon91a88f02021-02-04 15:34:32 -050087 // This has to be after GrResourceCache::releaseAll so that other threads that are holding
88 // async pixel result don't try to destroy buffers off thread.
89 fMappedBufferManager.reset();
Robert Phillipsad248452020-06-30 09:27:52 -040090}
Robert Phillipsa3457b82018-03-08 11:30:12 -050091
Adlai Holler61a591c2020-10-12 12:38:33 -040092sk_sp<GrContextThreadSafeProxy> GrDirectContext::threadSafeProxy() {
93 return INHERITED::threadSafeProxy();
94}
95
Adlai Hollera7a40442020-10-09 09:49:42 -040096void GrDirectContext::resetGLTextureBindings() {
97 if (this->abandoned() || this->backend() != GrBackendApi::kOpenGL) {
98 return;
99 }
100 fGpu->resetTextureBindings();
101}
102
103void GrDirectContext::resetContext(uint32_t state) {
104 ASSERT_SINGLE_OWNER
105 fGpu->markContextDirty(state);
106}
107
Robert Phillipsad248452020-06-30 09:27:52 -0400108void GrDirectContext::abandonContext() {
Adlai Hollera7a40442020-10-09 09:49:42 -0400109 if (INHERITED::abandoned()) {
110 return;
111 }
112
Robert Phillipsad248452020-06-30 09:27:52 -0400113 INHERITED::abandonContext();
Adlai Hollera7a40442020-10-09 09:49:42 -0400114
Greg Daniela89b4302021-01-29 10:48:40 -0500115 // We need to make sure all work is finished on the gpu before we start releasing resources.
116 this->syncAllOutstandingGpuWork(this->caps()->mustSyncGpuDuringAbandon());
117
Adlai Hollera7a40442020-10-09 09:49:42 -0400118 fStrikeCache->freeAll();
119
120 fMappedBufferManager->abandon();
121
122 fResourceProvider->abandon();
123
Robert Phillipseb999bc2020-11-03 08:41:47 -0500124 // abandon first so destructors don't try to free the resources in the API.
Adlai Hollera7a40442020-10-09 09:49:42 -0400125 fResourceCache->abandonAll();
126
127 fGpu->disconnect(GrGpu::DisconnectType::kAbandon);
128
Brian Salomon91a88f02021-02-04 15:34:32 -0500129 // Must be after GrResourceCache::abandonAll().
Adlai Hollera7a40442020-10-09 09:49:42 -0400130 fMappedBufferManager.reset();
Brian Salomon91a88f02021-02-04 15:34:32 -0500131
Robert Phillips079455c2020-08-11 15:18:46 -0400132 if (fSmallPathAtlasMgr) {
133 fSmallPathAtlasMgr->reset();
134 }
Robert Phillipsad248452020-06-30 09:27:52 -0400135 fAtlasManager->freeAll();
136}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500137
Adlai Hollera7a40442020-10-09 09:49:42 -0400138bool GrDirectContext::abandoned() {
139 if (INHERITED::abandoned()) {
140 return true;
141 }
142
143 if (fGpu && fGpu->isDeviceLost()) {
144 this->abandonContext();
145 return true;
146 }
147 return false;
148}
149
Adlai Holler61a591c2020-10-12 12:38:33 -0400150bool GrDirectContext::oomed() { return fGpu ? fGpu->checkAndResetOOMed() : false; }
151
Robert Phillipsad248452020-06-30 09:27:52 -0400152void GrDirectContext::releaseResourcesAndAbandonContext() {
Adlai Holler61a591c2020-10-12 12:38:33 -0400153 if (INHERITED::abandoned()) {
154 return;
155 }
156
157 INHERITED::abandonContext();
158
Greg Daniela89b4302021-01-29 10:48:40 -0500159 // We need to make sure all work is finished on the gpu before we start releasing resources.
160 this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/true);
161
Adlai Holler61a591c2020-10-12 12:38:33 -0400162 fResourceProvider->abandon();
163
164 // Release all resources in the backend 3D API.
165 fResourceCache->releaseAll();
166
Brian Salomon91a88f02021-02-04 15:34:32 -0500167 // Must be after GrResourceCache::releaseAll().
168 fMappedBufferManager.reset();
169
Adlai Holler61a591c2020-10-12 12:38:33 -0400170 fGpu->disconnect(GrGpu::DisconnectType::kCleanup);
Robert Phillips079455c2020-08-11 15:18:46 -0400171 if (fSmallPathAtlasMgr) {
172 fSmallPathAtlasMgr->reset();
173 }
Robert Phillipsad248452020-06-30 09:27:52 -0400174 fAtlasManager->freeAll();
175}
Robert Phillips6db27c22019-05-01 10:43:56 -0400176
Robert Phillipsad248452020-06-30 09:27:52 -0400177void GrDirectContext::freeGpuResources() {
Adlai Holler4aa4c602020-10-12 13:58:52 -0400178 ASSERT_SINGLE_OWNER
179
180 if (this->abandoned()) {
181 return;
182 }
183
Robert Phillipsad248452020-06-30 09:27:52 -0400184 this->flushAndSubmit();
Robert Phillips079455c2020-08-11 15:18:46 -0400185 if (fSmallPathAtlasMgr) {
186 fSmallPathAtlasMgr->reset();
187 }
Robert Phillipsad248452020-06-30 09:27:52 -0400188 fAtlasManager->freeAll();
Robert Phillips56181ba2019-03-08 12:00:45 -0500189
Adlai Holler4aa4c602020-10-12 13:58:52 -0400190 // TODO: the glyph cache doesn't hold any GpuResources so this call should not be needed here.
191 // Some slack in the GrTextBlob's implementation requires it though. That could be fixed.
192 fStrikeCache->freeAll();
193
194 this->drawingManager()->freeGpuResources();
195
Michael Ludwig9d1cc052021-06-09 20:49:48 -0400196 fResourceCache->purgeUnlockedResources();
Robert Phillipsad248452020-06-30 09:27:52 -0400197}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500198
Robert Phillipsad248452020-06-30 09:27:52 -0400199bool GrDirectContext::init() {
Adlai Holler9555f292020-10-09 09:41:14 -0400200 ASSERT_SINGLE_OWNER
201 if (!fGpu) {
Robert Phillipsad248452020-06-30 09:27:52 -0400202 return false;
Robert Phillipsa3457b82018-03-08 11:30:12 -0500203 }
204
Robert Phillipsae67c522021-03-03 11:03:38 -0500205 fThreadSafeProxy->priv().init(fGpu->refCaps(), fGpu->refPipelineBuilder());
Robert Phillipsad248452020-06-30 09:27:52 -0400206 if (!INHERITED::init()) {
207 return false;
208 }
Robert Phillipsa3457b82018-03-08 11:30:12 -0500209
Adlai Holler9555f292020-10-09 09:41:14 -0400210 SkASSERT(this->getTextBlobCache());
211 SkASSERT(this->threadSafeCache());
212
213 fStrikeCache = std::make_unique<GrStrikeCache>();
Robert Phillipsd074b622021-03-15 08:49:24 -0400214 fResourceCache = std::make_unique<GrResourceCache>(this->singleOwner(),
215 this->directContextID(),
216 this->contextID());
Adlai Holler9555f292020-10-09 09:41:14 -0400217 fResourceCache->setProxyProvider(this->proxyProvider());
218 fResourceCache->setThreadSafeCache(this->threadSafeCache());
Adlai Hollerb34270e2021-04-16 11:23:52 -0400219#if GR_TEST_UTILS
220 if (this->options().fResourceCacheLimitOverride != -1) {
221 this->setResourceCacheLimit(this->options().fResourceCacheLimitOverride);
222 }
223#endif
Adlai Holler9555f292020-10-09 09:41:14 -0400224 fResourceProvider = std::make_unique<GrResourceProvider>(fGpu.get(), fResourceCache.get(),
225 this->singleOwner());
Robert Phillips82ad7af2021-03-11 16:00:10 -0500226 fMappedBufferManager = std::make_unique<GrClientMappedBufferManager>(this->directContextID());
Adlai Holler9555f292020-10-09 09:41:14 -0400227
228 fDidTestPMConversions = false;
229
230 // DDL TODO: we need to think through how the task group & persistent cache
231 // get passed on to/shared between all the DDLRecorders created with this context.
232 if (this->options().fExecutor) {
233 fTaskGroup = std::make_unique<SkTaskGroup>(*this->options().fExecutor);
234 }
235
236 fPersistentCache = this->options().fPersistentCache;
Adlai Holler9555f292020-10-09 09:41:14 -0400237
Robert Phillipsad248452020-06-30 09:27:52 -0400238 GrDrawOpAtlas::AllowMultitexturing allowMultitexturing;
239 if (GrContextOptions::Enable::kNo == this->options().fAllowMultipleGlyphCacheTextures ||
240 // multitexturing supported only if range can represent the index + texcoords fully
241 !(this->caps()->shaderCaps()->floatIs32Bits() ||
242 this->caps()->shaderCaps()->integerSupport())) {
243 allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kNo;
244 } else {
245 allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kYes;
246 }
247
248 GrProxyProvider* proxyProvider = this->priv().proxyProvider();
249
Robert Phillips3262bc82020-08-10 12:11:58 -0400250 fAtlasManager = std::make_unique<GrAtlasManager>(proxyProvider,
251 this->options().fGlyphCacheTextureMaximumBytes,
252 allowMultitexturing);
253 this->priv().addOnFlushCallbackObject(fAtlasManager.get());
Robert Phillipsad248452020-06-30 09:27:52 -0400254
255 return true;
256}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500257
Adlai Holler3a508e92020-10-12 13:58:01 -0400258void GrDirectContext::getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const {
259 ASSERT_SINGLE_OWNER
260
261 if (resourceCount) {
262 *resourceCount = fResourceCache->getBudgetedResourceCount();
263 }
264 if (resourceBytes) {
265 *resourceBytes = fResourceCache->getBudgetedResourceBytes();
266 }
267}
268
269size_t GrDirectContext::getResourceCachePurgeableBytes() const {
270 ASSERT_SINGLE_OWNER
271 return fResourceCache->getPurgeableBytes();
272}
273
274void GrDirectContext::getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const {
275 ASSERT_SINGLE_OWNER
276 if (maxResources) {
277 *maxResources = -1;
278 }
279 if (maxResourceBytes) {
280 *maxResourceBytes = this->getResourceCacheLimit();
281 }
282}
283
284size_t GrDirectContext::getResourceCacheLimit() const {
285 ASSERT_SINGLE_OWNER
286 return fResourceCache->getMaxResourceBytes();
287}
288
289void GrDirectContext::setResourceCacheLimits(int unused, size_t maxResourceBytes) {
290 ASSERT_SINGLE_OWNER
291 this->setResourceCacheLimit(maxResourceBytes);
292}
293
294void GrDirectContext::setResourceCacheLimit(size_t maxResourceBytes) {
295 ASSERT_SINGLE_OWNER
296 fResourceCache->setLimit(maxResourceBytes);
297}
298
Adlai Holler4aa4c602020-10-12 13:58:52 -0400299void GrDirectContext::purgeUnlockedResources(bool scratchResourcesOnly) {
300 ASSERT_SINGLE_OWNER
301
302 if (this->abandoned()) {
303 return;
304 }
305
306 fResourceCache->purgeUnlockedResources(scratchResourcesOnly);
307 fResourceCache->purgeAsNeeded();
308
309 // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
310 // place to purge stale blobs
311 this->getTextBlobCache()->purgeStaleBlobs();
Greg Daniel428523f2021-03-30 14:22:54 -0400312
313 fGpu->releaseUnlockedBackendObjects();
Adlai Holler4aa4c602020-10-12 13:58:52 -0400314}
315
Michael Ludwig9d1cc052021-06-09 20:49:48 -0400316void GrDirectContext::performDeferredCleanup(std::chrono::milliseconds msNotUsed,
317 bool scratchResourcesOnly) {
Adlai Holler4aa4c602020-10-12 13:58:52 -0400318 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
319
320 ASSERT_SINGLE_OWNER
321
322 if (this->abandoned()) {
323 return;
324 }
325
326 this->checkAsyncWorkCompletion();
327 fMappedBufferManager->process();
328 auto purgeTime = GrStdSteadyClock::now() - msNotUsed;
329
330 fResourceCache->purgeAsNeeded();
Michael Ludwig9d1cc052021-06-09 20:49:48 -0400331 fResourceCache->purgeResourcesNotUsedSince(purgeTime, scratchResourcesOnly);
Adlai Holler4aa4c602020-10-12 13:58:52 -0400332
Adlai Holler4aa4c602020-10-12 13:58:52 -0400333 // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
334 // place to purge stale blobs
335 this->getTextBlobCache()->purgeStaleBlobs();
336}
337
338void GrDirectContext::purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources) {
339 ASSERT_SINGLE_OWNER
340
341 if (this->abandoned()) {
342 return;
343 }
344
345 fResourceCache->purgeUnlockedResources(bytesToPurge, preferScratchResources);
346}
347
Adlai Holler3acc69a2020-10-13 08:20:51 -0400348////////////////////////////////////////////////////////////////////////////////
349bool GrDirectContext::wait(int numSemaphores, const GrBackendSemaphore waitSemaphores[],
350 bool deleteSemaphoresAfterWait) {
Greg Daniel063fdce2021-05-06 19:45:55 +0000351 if (!fGpu || !fGpu->caps()->semaphoreSupport()) {
Adlai Holler3acc69a2020-10-13 08:20:51 -0400352 return false;
353 }
354 GrWrapOwnership ownership =
355 deleteSemaphoresAfterWait ? kAdopt_GrWrapOwnership : kBorrow_GrWrapOwnership;
356 for (int i = 0; i < numSemaphores; ++i) {
357 std::unique_ptr<GrSemaphore> sema = fResourceProvider->wrapBackendSemaphore(
Robert Phillips1a82a4e2021-07-01 10:27:44 -0400358 waitSemaphores[i], GrSemaphoreWrapType::kWillWait, ownership);
Adlai Holler3acc69a2020-10-13 08:20:51 -0400359 // If we failed to wrap the semaphore it means the client didn't give us a valid semaphore
360 // to begin with. Therefore, it is fine to not wait on it.
361 if (sema) {
362 fGpu->waitSemaphore(sema.get());
363 }
364 }
365 return true;
366}
Adlai Holler4aa4c602020-10-12 13:58:52 -0400367
Robert Phillips5edf5102020-08-10 16:30:36 -0400368GrSmallPathAtlasMgr* GrDirectContext::onGetSmallPathAtlasMgr() {
Robert Phillips079455c2020-08-11 15:18:46 -0400369 if (!fSmallPathAtlasMgr) {
370 fSmallPathAtlasMgr = std::make_unique<GrSmallPathAtlasMgr>();
371
372 this->priv().addOnFlushCallbackObject(fSmallPathAtlasMgr.get());
373 }
374
375 if (!fSmallPathAtlasMgr->initAtlas(this->proxyProvider(), this->caps())) {
376 return nullptr;
377 }
378
379 return fSmallPathAtlasMgr.get();
Robert Phillips5edf5102020-08-10 16:30:36 -0400380}
381
Adlai Holler3acc69a2020-10-13 08:20:51 -0400382////////////////////////////////////////////////////////////////////////////////
383
384GrSemaphoresSubmitted GrDirectContext::flush(const GrFlushInfo& info) {
385 ASSERT_SINGLE_OWNER
386 if (this->abandoned()) {
387 if (info.fFinishedProc) {
388 info.fFinishedProc(info.fFinishedContext);
389 }
390 if (info.fSubmittedProc) {
391 info.fSubmittedProc(info.fSubmittedContext, false);
392 }
393 return GrSemaphoresSubmitted::kNo;
394 }
395
Robert Phillips80bfda82020-11-12 09:23:36 -0500396 return this->drawingManager()->flushSurfaces({}, SkSurface::BackendSurfaceAccess::kNoAccess,
397 info, nullptr);
Adlai Holler3acc69a2020-10-13 08:20:51 -0400398}
399
400bool GrDirectContext::submit(bool syncCpu) {
401 ASSERT_SINGLE_OWNER
402 if (this->abandoned()) {
403 return false;
404 }
405
406 if (!fGpu) {
407 return false;
408 }
409
410 return fGpu->submitToGpu(syncCpu);
411}
412
413////////////////////////////////////////////////////////////////////////////////
414
415void GrDirectContext::checkAsyncWorkCompletion() {
416 if (fGpu) {
417 fGpu->checkFinishProcs();
418 }
419}
420
Greg Daniela89b4302021-01-29 10:48:40 -0500421void GrDirectContext::syncAllOutstandingGpuWork(bool shouldExecuteWhileAbandoned) {
422 if (fGpu && (!this->abandoned() || shouldExecuteWhileAbandoned)) {
423 fGpu->finishOutstandingGpuWork();
424 this->checkAsyncWorkCompletion();
425 }
426}
427
Adlai Holler3acc69a2020-10-13 08:20:51 -0400428////////////////////////////////////////////////////////////////////////////////
429
430void GrDirectContext::storeVkPipelineCacheData() {
431 if (fGpu) {
432 fGpu->storeVkPipelineCacheData();
433 }
434}
435
436////////////////////////////////////////////////////////////////////////////////
437
438bool GrDirectContext::supportsDistanceFieldText() const {
439 return this->caps()->shaderCaps()->supportsDistanceFieldText();
440}
441
442//////////////////////////////////////////////////////////////////////////////
443
444void GrDirectContext::dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const {
445 ASSERT_SINGLE_OWNER
446 fResourceCache->dumpMemoryStatistics(traceMemoryDump);
447 traceMemoryDump->dumpNumericValue("skia/gr_text_blob_cache", "size", "bytes",
448 this->getTextBlobCache()->usedBytes());
449}
450
Adlai Holler98dd0042020-10-13 10:04:00 -0400451GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
452 const GrBackendFormat& backendFormat,
453 GrMipmapped mipMapped,
454 GrRenderable renderable,
455 GrProtected isProtected) {
456 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
457 if (this->abandoned()) {
458 return GrBackendTexture();
459 }
460
461 return fGpu->createBackendTexture({width, height}, backendFormat, renderable,
462 mipMapped, isProtected);
463}
464
465GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
466 SkColorType skColorType,
467 GrMipmapped mipMapped,
468 GrRenderable renderable,
469 GrProtected isProtected) {
470 if (this->abandoned()) {
471 return GrBackendTexture();
472 }
473
474 const GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
475
476 return this->createBackendTexture(width, height, format, mipMapped, renderable, isProtected);
477}
478
Brian Salomon71283232021-04-08 12:45:58 -0400479static GrBackendTexture create_and_clear_backend_texture(GrDirectContext* dContext,
480 SkISize dimensions,
481 const GrBackendFormat& backendFormat,
482 GrMipmapped mipMapped,
483 GrRenderable renderable,
484 GrProtected isProtected,
485 sk_sp<GrRefCntedCallback> finishedCallback,
486 std::array<float, 4> color) {
Adlai Holler98dd0042020-10-13 10:04:00 -0400487 GrGpu* gpu = dContext->priv().getGpu();
Adlai Holler98dd0042020-10-13 10:04:00 -0400488 GrBackendTexture beTex = gpu->createBackendTexture(dimensions, backendFormat, renderable,
489 mipMapped, isProtected);
490 if (!beTex.isValid()) {
491 return {};
492 }
493
Brian Salomon71283232021-04-08 12:45:58 -0400494 if (!dContext->priv().getGpu()->clearBackendTexture(beTex,
495 std::move(finishedCallback),
496 color)) {
Adlai Holler98dd0042020-10-13 10:04:00 -0400497 dContext->deleteBackendTexture(beTex);
498 return {};
499 }
500 return beTex;
501}
502
Brian Salomonea1d39b2021-04-01 17:06:52 -0400503static bool update_texture_with_pixmaps(GrDirectContext* context,
504 const SkPixmap src[],
Brian Salomonb5f880a2020-12-07 11:30:16 -0500505 int numLevels,
506 const GrBackendTexture& backendTexture,
507 GrSurfaceOrigin textureOrigin,
508 sk_sp<GrRefCntedCallback> finishedCallback) {
Brian Salomonea1d39b2021-04-01 17:06:52 -0400509 GrColorType ct = SkColorTypeToGrColorType(src[0].colorType());
510 const GrBackendFormat& format = backendTexture.getBackendFormat();
Brian Salomon759217e2021-01-31 13:16:39 -0500511
Brian Salomonea1d39b2021-04-01 17:06:52 -0400512 if (!context->priv().caps()->areColorTypeAndFormatCompatible(ct, format)) {
513 return false;
514 }
515
516 auto proxy = context->priv().proxyProvider()->wrapBackendTexture(backendTexture,
517 kBorrow_GrWrapOwnership,
518 GrWrapCacheable::kNo,
519 kRW_GrIOType,
520 std::move(finishedCallback));
521 if (!proxy) {
522 return false;
523 }
524
525 GrSwizzle swizzle = context->priv().caps()->getReadSwizzle(format, ct);
526 GrSurfaceProxyView view(std::move(proxy), textureOrigin, swizzle);
Robert Phillips53eaa642021-08-10 13:49:51 -0400527 skgpu::SurfaceContext surfaceContext(context, std::move(view), src[0].info().colorInfo());
Brian Salomonea1d39b2021-04-01 17:06:52 -0400528 SkAutoSTArray<15, GrCPixmap> tmpSrc(numLevels);
Brian Salomon759217e2021-01-31 13:16:39 -0500529 for (int i = 0; i < numLevels; ++i) {
Brian Salomonea1d39b2021-04-01 17:06:52 -0400530 tmpSrc[i] = src[i];
531 }
Brian Salomon75ee7372021-04-06 15:04:35 -0400532 if (!surfaceContext.writePixels(context, tmpSrc.get(), numLevels)) {
Brian Salomonea1d39b2021-04-01 17:06:52 -0400533 return false;
Brian Salomon759217e2021-01-31 13:16:39 -0500534 }
535
Brian Salomonea1d39b2021-04-01 17:06:52 -0400536 GrSurfaceProxy* p = surfaceContext.asSurfaceProxy();
537 GrFlushInfo info;
538 context->priv().drawingManager()->flushSurfaces({&p, 1},
539 SkSurface::BackendSurfaceAccess::kNoAccess,
540 info,
541 nullptr);
542 return true;
Brian Salomonb5f880a2020-12-07 11:30:16 -0500543}
544
Adlai Holler98dd0042020-10-13 10:04:00 -0400545GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
546 const GrBackendFormat& backendFormat,
547 const SkColor4f& color,
548 GrMipmapped mipMapped,
549 GrRenderable renderable,
550 GrProtected isProtected,
551 GrGpuFinishedProc finishedProc,
552 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500553 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler98dd0042020-10-13 10:04:00 -0400554
555 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
556 if (this->abandoned()) {
557 return {};
558 }
559
Brian Salomon71283232021-04-08 12:45:58 -0400560 return create_and_clear_backend_texture(this,
561 {width, height},
562 backendFormat,
563 mipMapped,
564 renderable,
565 isProtected,
566 std::move(finishedCallback),
567 color.array());
Adlai Holler98dd0042020-10-13 10:04:00 -0400568}
569
570GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
571 SkColorType skColorType,
572 const SkColor4f& color,
573 GrMipmapped mipMapped,
574 GrRenderable renderable,
575 GrProtected isProtected,
576 GrGpuFinishedProc finishedProc,
577 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500578 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler98dd0042020-10-13 10:04:00 -0400579
580 if (this->abandoned()) {
581 return {};
582 }
583
584 GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
585 if (!format.isValid()) {
586 return {};
587 }
588
589 GrColorType grColorType = SkColorTypeToGrColorType(skColorType);
590 SkColor4f swizzledColor = this->caps()->getWriteSwizzle(format, grColorType).applyTo(color);
591
Brian Salomon71283232021-04-08 12:45:58 -0400592 return create_and_clear_backend_texture(this,
593 {width, height},
594 format,
595 mipMapped,
596 renderable,
597 isProtected,
598 std::move(finishedCallback),
599 swizzledColor.array());
Adlai Holler98dd0042020-10-13 10:04:00 -0400600}
601
602GrBackendTexture GrDirectContext::createBackendTexture(const SkPixmap srcData[],
603 int numProvidedLevels,
Brian Salomonb5f880a2020-12-07 11:30:16 -0500604 GrSurfaceOrigin textureOrigin,
Adlai Holler98dd0042020-10-13 10:04:00 -0400605 GrRenderable renderable,
606 GrProtected isProtected,
607 GrGpuFinishedProc finishedProc,
608 GrGpuFinishedContext finishedContext) {
609 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
610
Brian Salomon694ff172020-11-04 16:54:28 -0500611 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler98dd0042020-10-13 10:04:00 -0400612
613 if (this->abandoned()) {
614 return {};
615 }
616
617 if (!srcData || numProvidedLevels <= 0) {
618 return {};
619 }
620
Adlai Holler98dd0042020-10-13 10:04:00 -0400621 SkColorType colorType = srcData[0].colorType();
622
623 GrMipmapped mipMapped = GrMipmapped::kNo;
Adlai Holler98dd0042020-10-13 10:04:00 -0400624 if (numProvidedLevels > 1) {
Adlai Holler98dd0042020-10-13 10:04:00 -0400625 mipMapped = GrMipmapped::kYes;
626 }
627
Adlai Holler98dd0042020-10-13 10:04:00 -0400628 GrBackendFormat backendFormat = this->defaultBackendFormat(colorType, renderable);
Brian Salomonb5f880a2020-12-07 11:30:16 -0500629 GrBackendTexture beTex = this->createBackendTexture(srcData[0].width(),
630 srcData[0].height(),
631 backendFormat,
632 mipMapped,
633 renderable,
634 isProtected);
635 if (!beTex.isValid()) {
636 return {};
637 }
Brian Salomonea1d39b2021-04-01 17:06:52 -0400638 if (!update_texture_with_pixmaps(this,
Brian Salomonb5f880a2020-12-07 11:30:16 -0500639 srcData,
640 numProvidedLevels,
641 beTex,
642 textureOrigin,
643 std::move(finishedCallback))) {
644 this->deleteBackendTexture(beTex);
645 return {};
646 }
647 return beTex;
Adlai Holler98dd0042020-10-13 10:04:00 -0400648}
649
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400650bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
651 const SkColor4f& color,
652 GrGpuFinishedProc finishedProc,
653 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500654 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400655
656 if (this->abandoned()) {
657 return false;
658 }
659
Brian Salomon71283232021-04-08 12:45:58 -0400660 return fGpu->clearBackendTexture(backendTexture, std::move(finishedCallback), color.array());
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400661}
662
663bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
664 SkColorType skColorType,
665 const SkColor4f& color,
666 GrGpuFinishedProc finishedProc,
667 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500668 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400669
670 if (this->abandoned()) {
671 return false;
672 }
673
674 GrBackendFormat format = backendTexture.getBackendFormat();
675 GrColorType grColorType = SkColorTypeAndFormatToGrColorType(this->caps(), skColorType, format);
676
677 if (!this->caps()->areColorTypeAndFormatCompatible(grColorType, format)) {
678 return false;
679 }
680
681 GrSwizzle swizzle = this->caps()->getWriteSwizzle(format, grColorType);
Brian Salomon71283232021-04-08 12:45:58 -0400682 SkColor4f swizzledColor = swizzle.applyTo(color);
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400683
Brian Salomon71283232021-04-08 12:45:58 -0400684 return fGpu->clearBackendTexture(backendTexture,
685 std::move(finishedCallback),
686 swizzledColor.array());
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400687}
688
689bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
690 const SkPixmap srcData[],
691 int numLevels,
Brian Salomonb5f880a2020-12-07 11:30:16 -0500692 GrSurfaceOrigin textureOrigin,
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400693 GrGpuFinishedProc finishedProc,
694 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500695 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400696
697 if (this->abandoned()) {
698 return false;
699 }
700
701 if (!srcData || numLevels <= 0) {
702 return false;
703 }
704
Brian Salomonea1d39b2021-04-01 17:06:52 -0400705 // If the texture has MIP levels then we require that the full set is overwritten.
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400706 int numExpectedLevels = 1;
707 if (backendTexture.hasMipmaps()) {
708 numExpectedLevels = SkMipmap::ComputeLevelCount(backendTexture.width(),
709 backendTexture.height()) + 1;
710 }
711 if (numLevels != numExpectedLevels) {
712 return false;
713 }
Brian Salomonea1d39b2021-04-01 17:06:52 -0400714 return update_texture_with_pixmaps(this,
Brian Salomonb5f880a2020-12-07 11:30:16 -0500715 srcData,
716 numLevels,
717 backendTexture,
718 textureOrigin,
719 std::move(finishedCallback));
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400720}
721
Adlai Holler64e13832020-10-13 08:21:56 -0400722//////////////////////////////////////////////////////////////////////////////
723
724static GrBackendTexture create_and_update_compressed_backend_texture(
725 GrDirectContext* dContext,
726 SkISize dimensions,
727 const GrBackendFormat& backendFormat,
728 GrMipmapped mipMapped,
729 GrProtected isProtected,
730 sk_sp<GrRefCntedCallback> finishedCallback,
Brian Salomon71283232021-04-08 12:45:58 -0400731 const void* data,
732 size_t size) {
Adlai Holler64e13832020-10-13 08:21:56 -0400733 GrGpu* gpu = dContext->priv().getGpu();
734
735 GrBackendTexture beTex = gpu->createCompressedBackendTexture(dimensions, backendFormat,
736 mipMapped, isProtected);
737 if (!beTex.isValid()) {
738 return {};
739 }
740
741 if (!dContext->priv().getGpu()->updateCompressedBackendTexture(
Brian Salomon71283232021-04-08 12:45:58 -0400742 beTex, std::move(finishedCallback), data, size)) {
Adlai Holler64e13832020-10-13 08:21:56 -0400743 dContext->deleteBackendTexture(beTex);
744 return {};
745 }
746 return beTex;
747}
748
Brian Salomon71283232021-04-08 12:45:58 -0400749GrBackendTexture GrDirectContext::createCompressedBackendTexture(
750 int width, int height,
751 const GrBackendFormat& backendFormat,
752 const SkColor4f& color,
753 GrMipmapped mipmapped,
754 GrProtected isProtected,
755 GrGpuFinishedProc finishedProc,
756 GrGpuFinishedContext finishedContext) {
Adlai Holler64e13832020-10-13 08:21:56 -0400757 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
Brian Salomon694ff172020-11-04 16:54:28 -0500758 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400759
760 if (this->abandoned()) {
761 return {};
762 }
763
Brian Salomon71283232021-04-08 12:45:58 -0400764 SkImage::CompressionType compression = GrBackendFormatToCompressionType(backendFormat);
765 if (compression == SkImage::CompressionType::kNone) {
766 return {};
767 }
768
769 size_t size = SkCompressedDataSize(compression,
770 {width, height},
771 nullptr,
772 mipmapped == GrMipmapped::kYes);
773 auto storage = std::make_unique<char[]>(size);
774 GrFillInCompressedData(compression, {width, height}, mipmapped, storage.get(), color);
775 return create_and_update_compressed_backend_texture(this,
776 {width, height},
777 backendFormat,
778 mipmapped,
779 isProtected,
780 std::move(finishedCallback),
781 storage.get(),
782 size);
Adlai Holler64e13832020-10-13 08:21:56 -0400783}
784
Brian Salomon71283232021-04-08 12:45:58 -0400785GrBackendTexture GrDirectContext::createCompressedBackendTexture(
786 int width, int height,
787 SkImage::CompressionType compression,
788 const SkColor4f& color,
789 GrMipmapped mipMapped,
790 GrProtected isProtected,
791 GrGpuFinishedProc finishedProc,
792 GrGpuFinishedContext finishedContext) {
Adlai Holler64e13832020-10-13 08:21:56 -0400793 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
794 GrBackendFormat format = this->compressedBackendFormat(compression);
795 return this->createCompressedBackendTexture(width, height, format, color,
796 mipMapped, isProtected, finishedProc,
797 finishedContext);
798}
799
Brian Salomon71283232021-04-08 12:45:58 -0400800GrBackendTexture GrDirectContext::createCompressedBackendTexture(
801 int width, int height,
802 const GrBackendFormat& backendFormat,
803 const void* compressedData,
804 size_t dataSize,
805 GrMipmapped mipMapped,
806 GrProtected isProtected,
807 GrGpuFinishedProc finishedProc,
808 GrGpuFinishedContext finishedContext) {
Adlai Holler64e13832020-10-13 08:21:56 -0400809 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
Brian Salomon694ff172020-11-04 16:54:28 -0500810 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400811
812 if (this->abandoned()) {
813 return {};
814 }
815
Brian Salomon71283232021-04-08 12:45:58 -0400816 return create_and_update_compressed_backend_texture(this,
817 {width, height},
818 backendFormat,
819 mipMapped,
820 isProtected,
821 std::move(finishedCallback),
822 compressedData,
823 dataSize);
Adlai Holler64e13832020-10-13 08:21:56 -0400824}
825
Brian Salomon71283232021-04-08 12:45:58 -0400826GrBackendTexture GrDirectContext::createCompressedBackendTexture(
827 int width, int height,
828 SkImage::CompressionType compression,
829 const void* data, size_t dataSize,
830 GrMipmapped mipMapped,
831 GrProtected isProtected,
832 GrGpuFinishedProc finishedProc,
833 GrGpuFinishedContext finishedContext) {
Adlai Holler64e13832020-10-13 08:21:56 -0400834 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
835 GrBackendFormat format = this->compressedBackendFormat(compression);
836 return this->createCompressedBackendTexture(width, height, format, data, dataSize, mipMapped,
837 isProtected, finishedProc, finishedContext);
838}
839
840bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
841 const SkColor4f& color,
842 GrGpuFinishedProc finishedProc,
843 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500844 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400845
846 if (this->abandoned()) {
847 return false;
848 }
849
Brian Salomon71283232021-04-08 12:45:58 -0400850 SkImage::CompressionType compression =
851 GrBackendFormatToCompressionType(backendTexture.getBackendFormat());
852 if (compression == SkImage::CompressionType::kNone) {
853 return {};
854 }
855 size_t size = SkCompressedDataSize(compression,
856 backendTexture.dimensions(),
857 nullptr,
858 backendTexture.hasMipmaps());
859 SkAutoMalloc storage(size);
860 GrFillInCompressedData(compression,
861 backendTexture.dimensions(),
862 backendTexture.mipmapped(),
863 static_cast<char*>(storage.get()),
864 color);
865 return fGpu->updateCompressedBackendTexture(backendTexture,
866 std::move(finishedCallback),
867 storage.get(),
868 size);
Adlai Holler64e13832020-10-13 08:21:56 -0400869}
870
871bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
872 const void* compressedData,
873 size_t dataSize,
874 GrGpuFinishedProc finishedProc,
875 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500876 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400877
878 if (this->abandoned()) {
879 return false;
880 }
881
882 if (!compressedData) {
883 return false;
884 }
885
Brian Salomon71283232021-04-08 12:45:58 -0400886 return fGpu->updateCompressedBackendTexture(backendTexture,
887 std::move(finishedCallback),
888 compressedData,
889 dataSize);
Adlai Holler64e13832020-10-13 08:21:56 -0400890}
891
Adlai Holler6d0745b2020-10-13 13:29:00 -0400892//////////////////////////////////////////////////////////////////////////////
893
894bool GrDirectContext::setBackendTextureState(const GrBackendTexture& backendTexture,
895 const GrBackendSurfaceMutableState& state,
896 GrBackendSurfaceMutableState* previousState,
897 GrGpuFinishedProc finishedProc,
898 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500899 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400900
901 if (this->abandoned()) {
902 return false;
903 }
904
905 return fGpu->setBackendTextureState(backendTexture, state, previousState, std::move(callback));
906}
907
908
909bool GrDirectContext::setBackendRenderTargetState(const GrBackendRenderTarget& backendRenderTarget,
910 const GrBackendSurfaceMutableState& state,
911 GrBackendSurfaceMutableState* previousState,
912 GrGpuFinishedProc finishedProc,
913 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500914 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400915
916 if (this->abandoned()) {
917 return false;
918 }
919
920 return fGpu->setBackendRenderTargetState(backendRenderTarget, state, previousState,
921 std::move(callback));
922}
923
924void GrDirectContext::deleteBackendTexture(GrBackendTexture backendTex) {
925 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
926 // For the Vulkan backend we still must destroy the backend texture when the context is
927 // abandoned.
928 if ((this->abandoned() && this->backend() != GrBackendApi::kVulkan) || !backendTex.isValid()) {
929 return;
930 }
931
932 fGpu->deleteBackendTexture(backendTex);
933}
934
935//////////////////////////////////////////////////////////////////////////////
936
// Forwards a pre-serialized (key, data) shader-cache entry to the backend GPU for
// precompilation and returns the backend's success/failure result.
bool GrDirectContext::precompileShader(const SkData& key, const SkData& data) {
    return fGpu->precompileShader(key, data);
}
940
941#ifdef SK_ENABLE_DUMP_GPU
942#include "include/core/SkString.h"
943#include "src/utils/SkJSONWriter.h"
// Serializes the context's debug state — backend name, caps, gpu, and context JSON —
// into one pretty-printed JSON object and returns it as an SkString.
// Only built when SK_ENABLE_DUMP_GPU is defined.
SkString GrDirectContext::dump() const {
    SkDynamicMemoryWStream stream;
    SkJSONWriter writer(&stream, SkJSONWriter::Mode::kPretty);
    writer.beginObject();

    writer.appendString("backend", GrBackendApiToStr(this->backend()));

    // Each sub-object writes itself through the shared writer.
    writer.appendName("caps");
    this->caps()->dumpJSON(&writer);

    writer.appendName("gpu");
    this->fGpu->dumpJSON(&writer);

    writer.appendName("context");
    this->dumpJSON(&writer);

    // Flush JSON to the memory stream
    writer.endObject();
    writer.flush();

    // Null terminate the JSON data in the memory stream
    stream.write8(0);

    // Allocate a string big enough to hold all the data, then copy out of the stream
    SkString result(stream.bytesWritten());
    stream.copyToAndReset(result.writable_str());
    return result;
}
972#endif
973
John Rosascoa9b348f2019-11-08 13:18:15 -0800974#ifdef SK_GL
Robert Phillipsc7228c62020-07-14 12:57:39 -0400975
Robert Phillipsf4f80112020-07-13 16:13:31 -0400976/*************************************************************************************************/
977sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500978 GrContextOptions defaultOptions;
Jim Van Verth03b8ab22020-02-24 11:36:15 -0500979 return MakeGL(std::move(glInterface), defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -0500980}
981
Robert Phillipsf4f80112020-07-13 16:13:31 -0400982sk_sp<GrDirectContext> GrDirectContext::MakeGL(const GrContextOptions& options) {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400983 return MakeGL(nullptr, options);
984}
985
Robert Phillipsf4f80112020-07-13 16:13:31 -0400986sk_sp<GrDirectContext> GrDirectContext::MakeGL() {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400987 GrContextOptions defaultOptions;
988 return MakeGL(nullptr, defaultOptions);
989}
990
Brian Salomon24069eb2020-06-24 10:19:52 -0400991#if GR_TEST_UTILS
// Test-only helper: wraps a glGetError implementation so that roughly one in 300 calls
// that would otherwise report GR_GL_NO_ERROR reports GR_GL_OUT_OF_MEMORY instead, to
// exercise OOM-handling paths (driven by GrContextOptions::fRandomGLOOM).
GrGLFunction<GrGLGetErrorFn> make_get_error_with_random_oom(GrGLFunction<GrGLGetErrorFn> original) {
    // A SkRandom and a GrGLFunction<GrGLGetErrorFn> are too big to be captured by a
    // GrGLFunction<GrGLGetError> (surprise, surprise). So we make a context object and
    // capture that by pointer. However, GrGLFunction doesn't support calling a destructor
    // on the thing it captures. So we leak the context.
    struct GetErrorContext {
        SkRandom fRandom;
        GrGLFunction<GrGLGetErrorFn> fGetError;
    };

    auto errorContext = new GetErrorContext;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
    // The leak above is deliberate; keep LeakSanitizer from reporting it.
    __lsan_ignore_object(errorContext);
#endif

    errorContext->fGetError = original;

    return GrGLFunction<GrGLGetErrorFn>([errorContext]() {
        // Only inject an error when the real call succeeded, at ~1/300 probability.
        GrGLenum error = errorContext->fGetError();
        if (error == GR_GL_NO_ERROR && (errorContext->fRandom.nextU() % 300) == 0) {
            error = GR_GL_OUT_OF_MEMORY;
        }
        return error;
    });
}
1018#endif
1019
Robert Phillipsf4f80112020-07-13 16:13:31 -04001020sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface,
1021 const GrContextOptions& options) {
1022 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kOpenGL, options));
Brian Salomon24069eb2020-06-24 10:19:52 -04001023#if GR_TEST_UTILS
1024 if (options.fRandomGLOOM) {
1025 auto copy = sk_make_sp<GrGLInterface>(*glInterface);
1026 copy->fFunctions.fGetError =
1027 make_get_error_with_random_oom(glInterface->fFunctions.fGetError);
1028#if GR_GL_CHECK_ERROR
1029 // Suppress logging GL errors since we'll be synthetically generating them.
1030 copy->suppressErrorLogging();
1031#endif
1032 glInterface = std::move(copy);
1033 }
1034#endif
Robert Phillipsf4f80112020-07-13 16:13:31 -04001035 direct->fGpu = GrGLGpu::Make(std::move(glInterface), options, direct.get());
1036 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001037 return nullptr;
1038 }
Robert Phillipsf4f80112020-07-13 16:13:31 -04001039 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001040}
John Rosascoa9b348f2019-11-08 13:18:15 -08001041#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001042
Robert Phillipsf4f80112020-07-13 16:13:31 -04001043/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001044sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions) {
1045 GrContextOptions defaultOptions;
1046 return MakeMock(mockOptions, defaultOptions);
1047}
1048
1049sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions,
1050 const GrContextOptions& options) {
1051 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMock, options));
1052
1053 direct->fGpu = GrMockGpu::Make(mockOptions, options, direct.get());
1054 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001055 return nullptr;
1056 }
Chris Daltona378b452019-12-11 13:24:11 -05001057
Robert Phillipsf4f80112020-07-13 16:13:31 -04001058 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001059}
1060
Greg Danielb4d89562018-10-03 18:44:49 +00001061#ifdef SK_VULKAN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001062/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001063sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext) {
1064 GrContextOptions defaultOptions;
1065 return MakeVulkan(backendContext, defaultOptions);
1066}
1067
1068sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext,
1069 const GrContextOptions& options) {
1070 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kVulkan, options));
1071
1072 direct->fGpu = GrVkGpu::Make(backendContext, options, direct.get());
1073 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001074 return nullptr;
1075 }
1076
Robert Phillipsf4f80112020-07-13 16:13:31 -04001077 return direct;
Greg Danielb4d89562018-10-03 18:44:49 +00001078}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001079#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001080
1081#ifdef SK_METAL
Robert Phillipsf4f80112020-07-13 16:13:31 -04001082/*************************************************************************************************/
Jim Van Verth351c9b52020-11-12 15:21:11 -05001083sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001084 GrContextOptions defaultOptions;
Jim Van Verth351c9b52020-11-12 15:21:11 -05001085 return MakeMetal(backendContext, defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -05001086}
1087
Jim Van Verth351c9b52020-11-12 15:21:11 -05001088sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext,
1089 const GrContextOptions& options) {
Robert Phillipsf4f80112020-07-13 16:13:31 -04001090 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
Robert Phillipsa3457b82018-03-08 11:30:12 -05001091
Jim Van Verth351c9b52020-11-12 15:21:11 -05001092 direct->fGpu = GrMtlTrampoline::MakeGpu(backendContext, options, direct.get());
Robert Phillipsf4f80112020-07-13 16:13:31 -04001093 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001094 return nullptr;
1095 }
Timothy Liang4e85e802018-06-28 16:37:18 -04001096
Robert Phillipsf4f80112020-07-13 16:13:31 -04001097 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001098}
Jim Van Verth351c9b52020-11-12 15:21:11 -05001099
1100// deprecated
1101sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue) {
1102 GrContextOptions defaultOptions;
1103 return MakeMetal(device, queue, defaultOptions);
1104}
1105
1106// deprecated
1107// remove include/gpu/mtl/GrMtlBackendContext.h, above, when removed
1108sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue,
1109 const GrContextOptions& options) {
1110 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
1111 GrMtlBackendContext backendContext = {};
1112 backendContext.fDevice.reset(device);
1113 backendContext.fQueue.reset(queue);
1114
1115 return GrDirectContext::MakeMetal(backendContext, options);
1116}
Robert Phillipsa3457b82018-03-08 11:30:12 -05001117#endif
1118
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001119#ifdef SK_DIRECT3D
Robert Phillipsf4f80112020-07-13 16:13:31 -04001120/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001121sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext) {
1122 GrContextOptions defaultOptions;
1123 return MakeDirect3D(backendContext, defaultOptions);
1124}
1125
1126sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext,
1127 const GrContextOptions& options) {
1128 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDirect3D, options));
1129
1130 direct->fGpu = GrD3DGpu::Make(backendContext, options, direct.get());
1131 if (!direct->init()) {
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -05001132 return nullptr;
1133 }
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001134
Robert Phillipsf4f80112020-07-13 16:13:31 -04001135 return direct;
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001136}
1137#endif
1138
Stephen White985741a2019-07-18 11:43:45 -04001139#ifdef SK_DAWN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001140/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001141sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device) {
Stephen White985741a2019-07-18 11:43:45 -04001142 GrContextOptions defaultOptions;
1143 return MakeDawn(device, defaultOptions);
1144}
1145
Robert Phillipsf4f80112020-07-13 16:13:31 -04001146sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device,
1147 const GrContextOptions& options) {
1148 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDawn, options));
Stephen White985741a2019-07-18 11:43:45 -04001149
Robert Phillipsf4f80112020-07-13 16:13:31 -04001150 direct->fGpu = GrDawnGpu::Make(device, options, direct.get());
1151 if (!direct->init()) {
Stephen White985741a2019-07-18 11:43:45 -04001152 return nullptr;
1153 }
1154
Robert Phillipsf4f80112020-07-13 16:13:31 -04001155 return direct;
Stephen White985741a2019-07-18 11:43:45 -04001156}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001157
Stephen White985741a2019-07-18 11:43:45 -04001158#endif