blob: 96621c665f03fc2ab33c3af14f15376062447655 [file] [log] [blame]
Robert Phillipsa3457b82018-03-08 11:30:12 -05001/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Greg Daniel54bfb182018-11-20 17:12:36 -05008
Robert Phillipsb7bfbc22020-07-01 12:55:01 -04009#include "include/gpu/GrDirectContext.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050010
Adlai Holler3acc69a2020-10-13 08:20:51 -040011#include "include/core/SkTraceMemoryDump.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050012#include "include/gpu/GrContextThreadSafeProxy.h"
Adlai Holler9555f292020-10-09 09:41:14 -040013#include "src/core/SkTaskGroup.h"
14#include "src/gpu/GrClientMappedBufferManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050015#include "src/gpu/GrContextThreadSafeProxyPriv.h"
Adlai Hollera0693042020-10-14 11:23:11 -040016#include "src/gpu/GrDirectContextPriv.h"
Adlai Holler4aa4c602020-10-12 13:58:52 -040017#include "src/gpu/GrDrawingManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/gpu/GrGpu.h"
Adlai Holler9555f292020-10-09 09:41:14 -040019#include "src/gpu/GrResourceProvider.h"
20#include "src/gpu/GrShaderUtils.h"
Adlai Holler3acc69a2020-10-13 08:20:51 -040021#include "src/image/SkImage_GpuBase.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050022
Adlai Holler4aa4c602020-10-12 13:58:52 -040023#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050024#include "src/gpu/effects/GrSkSLFP.h"
25#include "src/gpu/gl/GrGLGpu.h"
26#include "src/gpu/mock/GrMockGpu.h"
Robert Phillips5edf5102020-08-10 16:30:36 -040027#include "src/gpu/ops/GrSmallPathAtlasMgr.h"
Robert Phillipse19babf2020-04-06 13:57:30 -040028#include "src/gpu/text/GrAtlasManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050029#include "src/gpu/text/GrStrikeCache.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050030#ifdef SK_METAL
Jim Van Verth351c9b52020-11-12 15:21:11 -050031#include "include/gpu/mtl/GrMtlBackendContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050032#include "src/gpu/mtl/GrMtlTrampoline.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050033#endif
34#ifdef SK_VULKAN
Mike Kleinc0bd9f92019-04-23 12:05:21 -050035#include "src/gpu/vk/GrVkGpu.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050036#endif
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -050037#ifdef SK_DIRECT3D
38#include "src/gpu/d3d/GrD3DGpu.h"
39#endif
Stephen White985741a2019-07-18 11:43:45 -040040#ifdef SK_DAWN
Mike Klein52337de2019-07-25 09:00:52 -050041#include "src/gpu/dawn/GrDawnGpu.h"
Stephen White985741a2019-07-18 11:43:45 -040042#endif
Adlai Holler6d0745b2020-10-13 13:29:00 -040043#include <memory>
Robert Phillipsa3457b82018-03-08 11:30:12 -050044
Brian Salomon24069eb2020-06-24 10:19:52 -040045#if GR_TEST_UTILS
46# include "include/utils/SkRandom.h"
47# if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
48# include <sanitizer/lsan_interface.h>
49# endif
50#endif
51
Adlai Holler9555f292020-10-09 09:41:14 -040052#define ASSERT_SINGLE_OWNER GR_ASSERT_SINGLE_OWNER(this->singleOwner())
53
Robert Phillipse7a959d2021-03-11 14:44:42 -050054GrDirectContext::DirectContextID GrDirectContext::DirectContextID::Next() {
Robert Phillipsedff4672021-03-11 09:16:25 -050055 static std::atomic<uint32_t> nextID{1};
56 uint32_t id;
57 do {
58 id = nextID.fetch_add(1, std::memory_order_relaxed);
59 } while (id == SK_InvalidUniqueID);
60 return DirectContextID(id);
61}
62
// Constructs a direct context for the given backend. The thread-safe proxy is
// built eagerly; the context is not usable until init() succeeds.
GrDirectContext::GrDirectContext(GrBackendApi backend, const GrContextOptions& options)
        : INHERITED(GrContextThreadSafeProxyPriv::Make(backend, options))
        , fDirectContextID(DirectContextID::Next()) {
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050067
// Tears down the context. The steps below are order-sensitive: flush/submit
// pending work, wait for the GPU to finish, then release caches and managers.
GrDirectContext::~GrDirectContext() {
    ASSERT_SINGLE_OWNER
    // this if-test protects against the case where the context is being destroyed
    // before having been fully created
    if (fGpu) {
        this->flushAndSubmit();
    }

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/false);

    this->destroyDrawingManager();

    // Ideally we could just let the ptr drop, but resource cache queries this ptr in releaseAll.
    if (fResourceCache) {
        fResourceCache->releaseAll();
    }
    // This has to be after GrResourceCache::releaseAll so that other threads that are holding
    // async pixel result don't try to destroy buffers off thread.
    fMappedBufferManager.reset();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050089
// Exposes the base class's thread-safe proxy to public callers.
sk_sp<GrContextThreadSafeProxy> GrDirectContext::threadSafeProxy() {
    return INHERITED::threadSafeProxy();
}
93
Adlai Hollera7a40442020-10-09 09:49:42 -040094void GrDirectContext::resetGLTextureBindings() {
95 if (this->abandoned() || this->backend() != GrBackendApi::kOpenGL) {
96 return;
97 }
98 fGpu->resetTextureBindings();
99}
100
// Marks the given backend state bits dirty so the GPU re-emits that state on
// the next use (for clients that touched the 3D API behind Skia's back).
void GrDirectContext::resetContext(uint32_t state) {
    ASSERT_SINGLE_OWNER
    fGpu->markContextDirty(state);
}
105
// Abandons the context: backend objects are orphaned, not cleaned up in the
// 3D API. Idempotent. The teardown order below is deliberate.
void GrDirectContext::abandonContext() {
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(this->caps()->mustSyncGpuDuringAbandon());

    fStrikeCache->freeAll();

    fMappedBufferManager->abandon();

    fResourceProvider->abandon();

    // abandon first so destructors don't try to free the resources in the API.
    fResourceCache->abandonAll();

    fGpu->disconnect(GrGpu::DisconnectType::kAbandon);

    // Must be after GrResourceCache::abandonAll().
    fMappedBufferManager.reset();

    // The small-path atlas manager is created lazily, so may not exist yet.
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500135
Adlai Hollera7a40442020-10-09 09:49:42 -0400136bool GrDirectContext::abandoned() {
137 if (INHERITED::abandoned()) {
138 return true;
139 }
140
141 if (fGpu && fGpu->isDeviceLost()) {
142 this->abandonContext();
143 return true;
144 }
145 return false;
146}
147
Adlai Holler61a591c2020-10-12 12:38:33 -0400148bool GrDirectContext::oomed() { return fGpu ? fGpu->checkAndResetOOMed() : false; }
149
// Like abandonContext(), but cleanly releases backend objects in the 3D API
// instead of orphaning them. Idempotent; ordering below is deliberate.
void GrDirectContext::releaseResourcesAndAbandonContext() {
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/true);

    fResourceProvider->abandon();

    // Release all resources in the backend 3D API.
    fResourceCache->releaseAll();

    // Must be after GrResourceCache::releaseAll().
    fMappedBufferManager.reset();

    fGpu->disconnect(GrGpu::DisconnectType::kCleanup);
    // The small-path atlas manager is created lazily, so may not exist yet.
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillips6db27c22019-05-01 10:43:56 -0400174
// Flushes pending work, then frees every GPU resource the context can let go
// of (atlases, strike cache, drawing-manager resources, unlocked cache items).
void GrDirectContext::freeGpuResources() {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->flushAndSubmit();
    // Lazily created; may not exist yet.
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();

    // TODO: the glyph cache doesn't hold any GpuResources so this call should not be needed here.
    // Some slack in the GrTextBlob's implementation requires it though. That could be fixed.
    fStrikeCache->freeAll();

    this->drawingManager()->freeGpuResources();

    fResourceCache->purgeAllUnlocked();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500196
// Second-phase initialization: wires up the thread-safe proxy, base class,
// caches, providers, and the glyph atlas manager. Returns false on failure
// (no GPU was created, or base-class init failed).
bool GrDirectContext::init() {
    ASSERT_SINGLE_OWNER
    if (!fGpu) {
        return false;
    }

    // The proxy needs the GPU's caps/pipeline builder before base init runs.
    fThreadSafeProxy->priv().init(fGpu->refCaps(), fGpu->refPipelineBuilder());
    if (!INHERITED::init()) {
        return false;
    }

    SkASSERT(this->getTextBlobCache());
    SkASSERT(this->threadSafeCache());

    fStrikeCache = std::make_unique<GrStrikeCache>();
    fResourceCache = std::make_unique<GrResourceCache>(this->singleOwner(),
                                                       this->directContextID(),
                                                       this->contextID());
    fResourceCache->setProxyProvider(this->proxyProvider());
    fResourceCache->setThreadSafeCache(this->threadSafeCache());
    fResourceProvider = std::make_unique<GrResourceProvider>(fGpu.get(), fResourceCache.get(),
                                                             this->singleOwner());
    fMappedBufferManager = std::make_unique<GrClientMappedBufferManager>(this->directContextID());

    fDidTestPMConversions = false;

    // DDL TODO: we need to think through how the task group & persistent cache
    // get passed on to/shared between all the DDLRecorders created with this context.
    if (this->options().fExecutor) {
        fTaskGroup = std::make_unique<SkTaskGroup>(*this->options().fExecutor);
    }

    fPersistentCache = this->options().fPersistentCache;
    fShaderErrorHandler = this->options().fShaderErrorHandler;
    if (!fShaderErrorHandler) {
        fShaderErrorHandler = GrShaderUtils::DefaultShaderErrorHandler();
    }

    GrDrawOpAtlas::AllowMultitexturing allowMultitexturing;
    if (GrContextOptions::Enable::kNo == this->options().fAllowMultipleGlyphCacheTextures ||
        // multitexturing supported only if range can represent the index + texcoords fully
        !(this->caps()->shaderCaps()->floatIs32Bits() ||
          this->caps()->shaderCaps()->integerSupport())) {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kNo;
    } else {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kYes;
    }

    GrProxyProvider* proxyProvider = this->priv().proxyProvider();

    fAtlasManager = std::make_unique<GrAtlasManager>(proxyProvider,
                                                     this->options().fGlyphCacheTextureMaximumBytes,
                                                     allowMultitexturing);
    // Register the atlas manager so it participates in flushes.
    this->priv().addOnFlushCallbackObject(fAtlasManager.get());

    return true;
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500254
Adlai Holler3a508e92020-10-12 13:58:01 -0400255void GrDirectContext::getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const {
256 ASSERT_SINGLE_OWNER
257
258 if (resourceCount) {
259 *resourceCount = fResourceCache->getBudgetedResourceCount();
260 }
261 if (resourceBytes) {
262 *resourceBytes = fResourceCache->getBudgetedResourceBytes();
263 }
264}
265
// Returns the number of bytes held by resources that could be purged.
size_t GrDirectContext::getResourceCachePurgeableBytes() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getPurgeableBytes();
}
270
271void GrDirectContext::getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const {
272 ASSERT_SINGLE_OWNER
273 if (maxResources) {
274 *maxResources = -1;
275 }
276 if (maxResourceBytes) {
277 *maxResourceBytes = this->getResourceCacheLimit();
278 }
279}
280
// Returns the byte budget of the resource cache.
size_t GrDirectContext::getResourceCacheLimit() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getMaxResourceBytes();
}
285
// Legacy setter: the resource-count limit is ignored; only the byte budget
// is applied.
void GrDirectContext::setResourceCacheLimits(int unused, size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    this->setResourceCacheLimit(maxResourceBytes);
}
290
// Sets the resource cache's byte budget.
void GrDirectContext::setResourceCacheLimit(size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    fResourceCache->setLimit(maxResourceBytes);
}
295
// Purges unlocked resources (optionally only scratch ones) and stale text
// blobs. No-op on an abandoned context.
void GrDirectContext::purgeUnlockedResources(bool scratchResourcesOnly) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(scratchResourcesOnly);
    fResourceCache->purgeAsNeeded();

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
310
// Periodic housekeeping: completes async work, recycles finished mapped
// buffers, and purges resources that have been idle for at least msNotUsed.
void GrDirectContext::performDeferredCleanup(std::chrono::milliseconds msNotUsed) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->checkAsyncWorkCompletion();
    fMappedBufferManager->process();
    // Anything untouched since this timestamp is eligible for purging.
    auto purgeTime = GrStdSteadyClock::now() - msNotUsed;

    fResourceCache->purgeAsNeeded();
    fResourceCache->purgeResourcesNotUsedSince(purgeTime);

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
331
// Purges unlocked resources until roughly bytesToPurge are freed, preferring
// scratch resources when requested. No-op on an abandoned context.
void GrDirectContext::purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(bytesToPurge, preferScratchResources);
}
341
Adlai Holler3acc69a2020-10-13 08:20:51 -0400342////////////////////////////////////////////////////////////////////////////////
343bool GrDirectContext::wait(int numSemaphores, const GrBackendSemaphore waitSemaphores[],
344 bool deleteSemaphoresAfterWait) {
345 if (!fGpu || fGpu->caps()->semaphoreSupport()) {
346 return false;
347 }
348 GrWrapOwnership ownership =
349 deleteSemaphoresAfterWait ? kAdopt_GrWrapOwnership : kBorrow_GrWrapOwnership;
350 for (int i = 0; i < numSemaphores; ++i) {
351 std::unique_ptr<GrSemaphore> sema = fResourceProvider->wrapBackendSemaphore(
352 waitSemaphores[i], GrResourceProvider::SemaphoreWrapType::kWillWait, ownership);
353 // If we failed to wrap the semaphore it means the client didn't give us a valid semaphore
354 // to begin with. Therefore, it is fine to not wait on it.
355 if (sema) {
356 fGpu->waitSemaphore(sema.get());
357 }
358 }
359 return true;
360}
Adlai Holler4aa4c602020-10-12 13:58:52 -0400361
// Lazily creates the small-path atlas manager, registers it for flush
// callbacks, and (re)initializes its atlas. Returns null if the atlas could
// not be initialized.
GrSmallPathAtlasMgr* GrDirectContext::onGetSmallPathAtlasMgr() {
    if (!fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr = std::make_unique<GrSmallPathAtlasMgr>();

        this->priv().addOnFlushCallbackObject(fSmallPathAtlasMgr.get());
    }

    if (!fSmallPathAtlasMgr->initAtlas(this->proxyProvider(), this->caps())) {
        return nullptr;
    }

    return fSmallPathAtlasMgr.get();
}
375
Adlai Holler3acc69a2020-10-13 08:20:51 -0400376////////////////////////////////////////////////////////////////////////////////
377
// Flushes all work to the backend. On an abandoned context the finished/
// submitted callbacks are still invoked (submitted with success=false) so
// clients can clean up, and kNo is returned.
GrSemaphoresSubmitted GrDirectContext::flush(const GrFlushInfo& info) {
    ASSERT_SINGLE_OWNER
    if (this->abandoned()) {
        if (info.fFinishedProc) {
            info.fFinishedProc(info.fFinishedContext);
        }
        if (info.fSubmittedProc) {
            info.fSubmittedProc(info.fSubmittedContext, false);
        }
        return GrSemaphoresSubmitted::kNo;
    }

    // Empty proxy list means "flush everything".
    return this->drawingManager()->flushSurfaces({}, SkSurface::BackendSurfaceAccess::kNoAccess,
                                                 info, nullptr);
}
393
394bool GrDirectContext::submit(bool syncCpu) {
395 ASSERT_SINGLE_OWNER
396 if (this->abandoned()) {
397 return false;
398 }
399
400 if (!fGpu) {
401 return false;
402 }
403
404 return fGpu->submitToGpu(syncCpu);
405}
406
407////////////////////////////////////////////////////////////////////////////////
408
// Polls the backend for completed work and fires any pending finished-procs.
void GrDirectContext::checkAsyncWorkCompletion() {
    if (fGpu) {
        fGpu->checkFinishProcs();
    }
}
414
// Blocks until all outstanding GPU work finishes, then runs finished-procs.
// Skipped on an abandoned context unless the caller explicitly opts in.
void GrDirectContext::syncAllOutstandingGpuWork(bool shouldExecuteWhileAbandoned) {
    if (fGpu && (!this->abandoned() || shouldExecuteWhileAbandoned)) {
        fGpu->finishOutstandingGpuWork();
        this->checkAsyncWorkCompletion();
    }
}
421
Adlai Holler3acc69a2020-10-13 08:20:51 -0400422////////////////////////////////////////////////////////////////////////////////
423
// Asks the backend to persist its pipeline-cache data (Vulkan-specific; other
// backends treat this as a no-op via the GrGpu virtual).
void GrDirectContext::storeVkPipelineCacheData() {
    if (fGpu) {
        fGpu->storeVkPipelineCacheData();
    }
}
429
430////////////////////////////////////////////////////////////////////////////////
431
// True when the shader caps allow distance-field text rendering.
bool GrDirectContext::supportsDistanceFieldText() const {
    return this->caps()->shaderCaps()->supportsDistanceFieldText();
}
435
436//////////////////////////////////////////////////////////////////////////////
437
// Dumps resource-cache and text-blob-cache memory usage into the supplied
// SkTraceMemoryDump for client-side memory tracing.
void GrDirectContext::dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const {
    ASSERT_SINGLE_OWNER
    fResourceCache->dumpMemoryStatistics(traceMemoryDump);
    traceMemoryDump->dumpNumericValue("skia/gr_text_blob_cache", "size", "bytes",
                                      this->getTextBlobCache()->usedBytes());
}
444
// Creates an uninitialized backend texture with the given format. Returns an
// invalid GrBackendTexture if the context is abandoned or creation fails.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    return fGpu->createBackendTexture({width, height}, backendFormat, renderable,
                                      mipMapped, isProtected);
}
458
// Convenience overload: maps the SkColorType to this context's default
// backend format, then delegates to the format-based overload.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    const GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);

    return this->createBackendTexture(width, height, format, mipMapped, renderable, isProtected);
}
472
// Helper: creates a backend texture and immediately uploads the given data.
// On upload failure the freshly created texture is deleted so nothing leaks;
// an invalid texture is returned in every failure case.
static GrBackendTexture create_and_update_backend_texture(
        GrDirectContext* dContext,
        SkISize dimensions,
        const GrBackendFormat& backendFormat,
        GrMipmapped mipMapped,
        GrRenderable renderable,
        GrProtected isProtected,
        sk_sp<GrRefCntedCallback> finishedCallback,
        const GrGpu::BackendTextureData* data) {
    GrGpu* gpu = dContext->priv().getGpu();

    GrBackendTexture beTex = gpu->createBackendTexture(dimensions, backendFormat, renderable,
                                                       mipMapped, isProtected);
    if (!beTex.isValid()) {
        return {};
    }

    if (!dContext->priv().getGpu()->updateBackendTexture(beTex,
                                                         std::move(finishedCallback),
                                                         data)) {
        // Don't leak the texture if the data upload fails.
        dContext->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
498
// Helper: uploads a mip chain of pixmaps into a backend texture. Levels that
// need conversion -- a bottom-left origin (vertical flip) or non-tight rows on
// a backend without row-bytes support -- are first copied into one shared
// temporary buffer; other levels are uploaded from the caller's memory as-is.
static bool update_texture_with_pixmaps(GrGpu* gpu,
                                        const SkPixmap* srcData,
                                        int numLevels,
                                        const GrBackendTexture& backendTexture,
                                        GrSurfaceOrigin textureOrigin,
                                        sk_sp<GrRefCntedCallback> finishedCallback) {
    bool flip = textureOrigin == kBottomLeft_GrSurfaceOrigin;
    bool mustBeTight = !gpu->caps()->writePixelsRowBytesSupport();

    // First pass: total the temp storage needed by levels requiring conversion.
    size_t size = 0;
    for (int i = 0; i < numLevels; ++i) {
        size_t minRowBytes = srcData[i].info().minRowBytes();
        if (flip || (mustBeTight && srcData[i].rowBytes() != minRowBytes)) {
            size += minRowBytes * srcData[i].height();
        }
    }

    std::unique_ptr<char[]> tempStorage;
    if (size) {
        tempStorage.reset(new char[size]);
    }
    // Second pass: 'size' is reused as the running offset into tempStorage.
    size = 0;
    SkAutoSTArray<15, GrPixmap> tempPixmaps(numLevels);
    for (int i = 0; i < numLevels; ++i) {
        size_t minRowBytes = srcData[i].info().minRowBytes();
        if (flip || (mustBeTight && srcData[i].rowBytes() != minRowBytes)) {
            tempPixmaps[i] = {srcData[i].info(), tempStorage.get() + size, minRowBytes};
            SkAssertResult(GrConvertPixels(tempPixmaps[i], srcData[i], flip));
            size += minRowBytes*srcData[i].height();
        } else {
            // No conversion needed; upload directly from the caller's pixels.
            tempPixmaps[i] = srcData[i];
        }
    }

    GrGpu::BackendTextureData data(tempPixmaps.get());
    return gpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
536
// Creates a backend texture with the given format, cleared to 'color'. The
// finished callback is created before the abandoned-check so it is always
// eventually invoked, even on early-out.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_backend_texture(this, {width, height},
                                             backendFormat, mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
557
// Creates a backend texture for the given SkColorType, cleared to 'color'.
// The clear color is pre-swizzled to match the format's write swizzle.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
    if (!format.isValid()) {
        return {};
    }

    GrColorType grColorType = SkColorTypeToGrColorType(skColorType);
    SkColor4f swizzledColor = this->caps()->getWriteSwizzle(format, grColorType).applyTo(color);

    GrGpu::BackendTextureData data(swizzledColor);
    return create_and_update_backend_texture(this, {width, height}, format,
                                             mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
585
// Creates a backend texture initialized from a full mip chain of pixmaps.
// Either exactly one level or a complete chain must be provided; on any
// failure (including a failed upload) an invalid texture is returned and
// nothing leaks.
GrBackendTexture GrDirectContext::createBackendTexture(const SkPixmap srcData[],
                                                       int numProvidedLevels,
                                                       GrSurfaceOrigin textureOrigin,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    if (!srcData || numProvidedLevels <= 0) {
        return {};
    }

    // Dimensions and color type come from the base (level 0) pixmap.
    int baseWidth = srcData[0].width();
    int baseHeight = srcData[0].height();
    SkColorType colorType = srcData[0].colorType();

    GrMipmapped mipMapped = GrMipmapped::kNo;
    int numExpectedLevels = 1;
    if (numProvidedLevels > 1) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(baseWidth, baseHeight) + 1;
        mipMapped = GrMipmapped::kYes;
    }

    // Partial mip chains are rejected.
    if (numProvidedLevels != numExpectedLevels) {
        return {};
    }

    GrBackendFormat backendFormat = this->defaultBackendFormat(colorType, renderable);
    GrBackendTexture beTex = this->createBackendTexture(srcData[0].width(),
                                                        srcData[0].height(),
                                                        backendFormat,
                                                        mipMapped,
                                                        renderable,
                                                        isProtected);
    if (!beTex.isValid()) {
        return {};
    }
    if (!update_texture_with_pixmaps(this->priv().getGpu(),
                                     srcData,
                                     numProvidedLevels,
                                     beTex,
                                     textureOrigin,
                                     std::move(finishedCallback))) {
        // Upload failed; don't leak the texture we just created.
        this->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
641
// Overwrites an existing backend texture with a solid color (no swizzling).
// Returns false if the context is abandoned or the backend update fails.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrGpu::BackendTextureData data(color);
    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
655
// Overwrites an existing backend texture with a solid color, interpreting it
// through skColorType. Verifies color-type/format compatibility and applies
// the format's write swizzle before upload.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           SkColorType skColorType,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrBackendFormat format = backendTexture.getBackendFormat();
    GrColorType grColorType = SkColorTypeAndFormatToGrColorType(this->caps(), skColorType, format);

    if (!this->caps()->areColorTypeAndFormatCompatible(grColorType, format)) {
        return false;
    }

    GrSwizzle swizzle = this->caps()->getWriteSwizzle(format, grColorType);
    GrGpu::BackendTextureData data(swizzle.applyTo(color));

    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
679
// Re-uploads pixel data into an existing backend texture. The level count
// must match the texture's mip state exactly (1 level, or a full chain).
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkPixmap srcData[],
                                           int numLevels,
                                           GrSurfaceOrigin textureOrigin,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    if (!srcData || numLevels <= 0) {
        return false;
    }

    int numExpectedLevels = 1;
    if (backendTexture.hasMipmaps()) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(backendTexture.width(),
                                                        backendTexture.height()) + 1;
    }
    if (numLevels != numExpectedLevels) {
        return false;
    }
    return update_texture_with_pixmaps(fGpu.get(),
                                       srcData,
                                       numLevels,
                                       backendTexture,
                                       textureOrigin,
                                       std::move(finishedCallback));
}
711
Adlai Holler64e13832020-10-13 08:21:56 -0400712//////////////////////////////////////////////////////////////////////////////
713
// Helper: creates a compressed backend texture and immediately uploads the
// given data. On upload failure the new texture is deleted; an invalid
// texture is returned in every failure case.
static GrBackendTexture create_and_update_compressed_backend_texture(
        GrDirectContext* dContext,
        SkISize dimensions,
        const GrBackendFormat& backendFormat,
        GrMipmapped mipMapped,
        GrProtected isProtected,
        sk_sp<GrRefCntedCallback> finishedCallback,
        const GrGpu::BackendTextureData* data) {
    GrGpu* gpu = dContext->priv().getGpu();

    GrBackendTexture beTex = gpu->createCompressedBackendTexture(dimensions, backendFormat,
                                                                 mipMapped, isProtected);
    if (!beTex.isValid()) {
        return {};
    }

    if (!dContext->priv().getGpu()->updateCompressedBackendTexture(
                beTex, std::move(finishedCallback), data)) {
        // Don't leak the texture if the data upload fails.
        dContext->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
737
// Creates a compressed backend texture with the given format, filled with a
// solid color.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const SkColor4f& color,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}
757
// Convenience overload: maps the SkImage compression type to a backend
// format, then delegates to the format-based color overload.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 SkImage::CompressionType compression,
                                                                 const SkColor4f& color,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    GrBackendFormat format = this->compressedBackendFormat(compression);
    return this->createCompressedBackendTexture(width, height, format, color,
                                                mipMapped, isProtected, finishedProc,
                                                finishedContext);
}
771
// Creates a compressed backend texture initialized from raw pre-compressed
// pixel data of 'dataSize' bytes.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const void* compressedData,
                                                                 size_t dataSize,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(compressedData, dataSize);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}
792
793GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
794 SkImage::CompressionType compression,
795 const void* data, size_t dataSize,
796 GrMipmapped mipMapped,
797 GrProtected isProtected,
798 GrGpuFinishedProc finishedProc,
799 GrGpuFinishedContext finishedContext) {
800 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
801 GrBackendFormat format = this->compressedBackendFormat(compression);
802 return this->createCompressedBackendTexture(width, height, format, data, dataSize, mipMapped,
803 isProtected, finishedProc, finishedContext);
804}
805
806bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
807 const SkColor4f& color,
808 GrGpuFinishedProc finishedProc,
809 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500810 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400811
812 if (this->abandoned()) {
813 return false;
814 }
815
816 GrGpu::BackendTextureData data(color);
817 return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
818}
819
820bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
821 const void* compressedData,
822 size_t dataSize,
823 GrGpuFinishedProc finishedProc,
824 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500825 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400826
827 if (this->abandoned()) {
828 return false;
829 }
830
831 if (!compressedData) {
832 return false;
833 }
834
835 GrGpu::BackendTextureData data(compressedData, dataSize);
836
837 return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
838}
839
Adlai Holler6d0745b2020-10-13 13:29:00 -0400840//////////////////////////////////////////////////////////////////////////////
841
842bool GrDirectContext::setBackendTextureState(const GrBackendTexture& backendTexture,
843 const GrBackendSurfaceMutableState& state,
844 GrBackendSurfaceMutableState* previousState,
845 GrGpuFinishedProc finishedProc,
846 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500847 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400848
849 if (this->abandoned()) {
850 return false;
851 }
852
853 return fGpu->setBackendTextureState(backendTexture, state, previousState, std::move(callback));
854}
855
856
857bool GrDirectContext::setBackendRenderTargetState(const GrBackendRenderTarget& backendRenderTarget,
858 const GrBackendSurfaceMutableState& state,
859 GrBackendSurfaceMutableState* previousState,
860 GrGpuFinishedProc finishedProc,
861 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500862 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400863
864 if (this->abandoned()) {
865 return false;
866 }
867
868 return fGpu->setBackendRenderTargetState(backendRenderTarget, state, previousState,
869 std::move(callback));
870}
871
872void GrDirectContext::deleteBackendTexture(GrBackendTexture backendTex) {
873 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
874 // For the Vulkan backend we still must destroy the backend texture when the context is
875 // abandoned.
876 if ((this->abandoned() && this->backend() != GrBackendApi::kVulkan) || !backendTex.isValid()) {
877 return;
878 }
879
880 fGpu->deleteBackendTexture(backendTex);
881}
882
883//////////////////////////////////////////////////////////////////////////////
884
// Hands a previously captured (key, data) shader pair to the GPU backend so it can warm
// its program/pipeline cache; returns the backend's success result.
// NOTE(review): unlike most public entry points in this file, there is no abandoned()
// guard before dereferencing fGpu — confirm callers cannot reach this after abandonment.
bool GrDirectContext::precompileShader(const SkData& key, const SkData& data) {
    return fGpu->precompileShader(key, data);
}
888
889#ifdef SK_ENABLE_DUMP_GPU
890#include "include/core/SkString.h"
891#include "src/utils/SkJSONWriter.h"
892SkString GrDirectContext::dump() const {
893 SkDynamicMemoryWStream stream;
894 SkJSONWriter writer(&stream, SkJSONWriter::Mode::kPretty);
895 writer.beginObject();
896
897 writer.appendString("backend", GrBackendApiToStr(this->backend()));
898
899 writer.appendName("caps");
900 this->caps()->dumpJSON(&writer);
901
902 writer.appendName("gpu");
903 this->fGpu->dumpJSON(&writer);
904
905 writer.appendName("context");
906 this->dumpJSON(&writer);
907
908 // Flush JSON to the memory stream
909 writer.endObject();
910 writer.flush();
911
912 // Null terminate the JSON data in the memory stream
913 stream.write8(0);
914
915 // Allocate a string big enough to hold all the data, then copy out of the stream
916 SkString result(stream.bytesWritten());
917 stream.copyToAndReset(result.writable_str());
918 return result;
919}
920#endif
921
John Rosascoa9b348f2019-11-08 13:18:15 -0800922#ifdef SK_GL
Robert Phillipsc7228c62020-07-14 12:57:39 -0400923
Robert Phillipsf4f80112020-07-13 16:13:31 -0400924/*************************************************************************************************/
925sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500926 GrContextOptions defaultOptions;
Jim Van Verth03b8ab22020-02-24 11:36:15 -0500927 return MakeGL(std::move(glInterface), defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -0500928}
929
Robert Phillipsf4f80112020-07-13 16:13:31 -0400930sk_sp<GrDirectContext> GrDirectContext::MakeGL(const GrContextOptions& options) {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400931 return MakeGL(nullptr, options);
932}
933
Robert Phillipsf4f80112020-07-13 16:13:31 -0400934sk_sp<GrDirectContext> GrDirectContext::MakeGL() {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400935 GrContextOptions defaultOptions;
936 return MakeGL(nullptr, defaultOptions);
937}
938
Brian Salomon24069eb2020-06-24 10:19:52 -0400939#if GR_TEST_UTILS
940GrGLFunction<GrGLGetErrorFn> make_get_error_with_random_oom(GrGLFunction<GrGLGetErrorFn> original) {
941 // A SkRandom and a GrGLFunction<GrGLGetErrorFn> are too big to be captured by a
942 // GrGLFunction<GrGLGetError> (surprise, surprise). So we make a context object and
943 // capture that by pointer. However, GrGLFunction doesn't support calling a destructor
944 // on the thing it captures. So we leak the context.
945 struct GetErrorContext {
946 SkRandom fRandom;
947 GrGLFunction<GrGLGetErrorFn> fGetError;
948 };
949
950 auto errorContext = new GetErrorContext;
951
952#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
953 __lsan_ignore_object(errorContext);
954#endif
955
956 errorContext->fGetError = original;
957
958 return GrGLFunction<GrGLGetErrorFn>([errorContext]() {
959 GrGLenum error = errorContext->fGetError();
960 if (error == GR_GL_NO_ERROR && (errorContext->fRandom.nextU() % 300) == 0) {
961 error = GR_GL_OUT_OF_MEMORY;
962 }
963 return error;
964 });
965}
966#endif
967
Robert Phillipsf4f80112020-07-13 16:13:31 -0400968sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface,
969 const GrContextOptions& options) {
970 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kOpenGL, options));
Brian Salomon24069eb2020-06-24 10:19:52 -0400971#if GR_TEST_UTILS
972 if (options.fRandomGLOOM) {
973 auto copy = sk_make_sp<GrGLInterface>(*glInterface);
974 copy->fFunctions.fGetError =
975 make_get_error_with_random_oom(glInterface->fFunctions.fGetError);
976#if GR_GL_CHECK_ERROR
977 // Suppress logging GL errors since we'll be synthetically generating them.
978 copy->suppressErrorLogging();
979#endif
980 glInterface = std::move(copy);
981 }
982#endif
Robert Phillipsf4f80112020-07-13 16:13:31 -0400983 direct->fGpu = GrGLGpu::Make(std::move(glInterface), options, direct.get());
984 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500985 return nullptr;
986 }
Robert Phillipsf4f80112020-07-13 16:13:31 -0400987 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -0500988}
John Rosascoa9b348f2019-11-08 13:18:15 -0800989#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -0500990
Robert Phillipsf4f80112020-07-13 16:13:31 -0400991/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -0400992sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions) {
993 GrContextOptions defaultOptions;
994 return MakeMock(mockOptions, defaultOptions);
995}
996
997sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions,
998 const GrContextOptions& options) {
999 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMock, options));
1000
1001 direct->fGpu = GrMockGpu::Make(mockOptions, options, direct.get());
1002 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001003 return nullptr;
1004 }
Chris Daltona378b452019-12-11 13:24:11 -05001005
Robert Phillipsf4f80112020-07-13 16:13:31 -04001006 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001007}
1008
Greg Danielb4d89562018-10-03 18:44:49 +00001009#ifdef SK_VULKAN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001010/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001011sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext) {
1012 GrContextOptions defaultOptions;
1013 return MakeVulkan(backendContext, defaultOptions);
1014}
1015
1016sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext,
1017 const GrContextOptions& options) {
1018 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kVulkan, options));
1019
1020 direct->fGpu = GrVkGpu::Make(backendContext, options, direct.get());
1021 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001022 return nullptr;
1023 }
1024
Robert Phillipsf4f80112020-07-13 16:13:31 -04001025 return direct;
Greg Danielb4d89562018-10-03 18:44:49 +00001026}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001027#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001028
1029#ifdef SK_METAL
Robert Phillipsf4f80112020-07-13 16:13:31 -04001030/*************************************************************************************************/
Jim Van Verth351c9b52020-11-12 15:21:11 -05001031sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001032 GrContextOptions defaultOptions;
Jim Van Verth351c9b52020-11-12 15:21:11 -05001033 return MakeMetal(backendContext, defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -05001034}
1035
Jim Van Verth351c9b52020-11-12 15:21:11 -05001036sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext,
1037 const GrContextOptions& options) {
Robert Phillipsf4f80112020-07-13 16:13:31 -04001038 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
Robert Phillipsa3457b82018-03-08 11:30:12 -05001039
Jim Van Verth351c9b52020-11-12 15:21:11 -05001040 direct->fGpu = GrMtlTrampoline::MakeGpu(backendContext, options, direct.get());
Robert Phillipsf4f80112020-07-13 16:13:31 -04001041 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001042 return nullptr;
1043 }
Timothy Liang4e85e802018-06-28 16:37:18 -04001044
Robert Phillipsf4f80112020-07-13 16:13:31 -04001045 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001046}
Jim Van Verth351c9b52020-11-12 15:21:11 -05001047
1048// deprecated
1049sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue) {
1050 GrContextOptions defaultOptions;
1051 return MakeMetal(device, queue, defaultOptions);
1052}
1053
1054// deprecated
1055// remove include/gpu/mtl/GrMtlBackendContext.h, above, when removed
1056sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue,
1057 const GrContextOptions& options) {
1058 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
1059 GrMtlBackendContext backendContext = {};
1060 backendContext.fDevice.reset(device);
1061 backendContext.fQueue.reset(queue);
1062
1063 return GrDirectContext::MakeMetal(backendContext, options);
1064}
Robert Phillipsa3457b82018-03-08 11:30:12 -05001065#endif
1066
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001067#ifdef SK_DIRECT3D
Robert Phillipsf4f80112020-07-13 16:13:31 -04001068/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001069sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext) {
1070 GrContextOptions defaultOptions;
1071 return MakeDirect3D(backendContext, defaultOptions);
1072}
1073
1074sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext,
1075 const GrContextOptions& options) {
1076 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDirect3D, options));
1077
1078 direct->fGpu = GrD3DGpu::Make(backendContext, options, direct.get());
1079 if (!direct->init()) {
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -05001080 return nullptr;
1081 }
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001082
Robert Phillipsf4f80112020-07-13 16:13:31 -04001083 return direct;
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001084}
1085#endif
1086
Stephen White985741a2019-07-18 11:43:45 -04001087#ifdef SK_DAWN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001088/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001089sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device) {
Stephen White985741a2019-07-18 11:43:45 -04001090 GrContextOptions defaultOptions;
1091 return MakeDawn(device, defaultOptions);
1092}
1093
Robert Phillipsf4f80112020-07-13 16:13:31 -04001094sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device,
1095 const GrContextOptions& options) {
1096 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDawn, options));
Stephen White985741a2019-07-18 11:43:45 -04001097
Robert Phillipsf4f80112020-07-13 16:13:31 -04001098 direct->fGpu = GrDawnGpu::Make(device, options, direct.get());
1099 if (!direct->init()) {
Stephen White985741a2019-07-18 11:43:45 -04001100 return nullptr;
1101 }
1102
Robert Phillipsf4f80112020-07-13 16:13:31 -04001103 return direct;
Stephen White985741a2019-07-18 11:43:45 -04001104}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001105
Stephen White985741a2019-07-18 11:43:45 -04001106#endif