blob: f04c3659fc0dd2829b1a29927ba579a56234fc8e [file] [log] [blame]
Robert Phillipsa3457b82018-03-08 11:30:12 -05001/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Greg Daniel54bfb182018-11-20 17:12:36 -05008
Robert Phillipsb7bfbc22020-07-01 12:55:01 -04009#include "include/gpu/GrDirectContext.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050010
Adlai Holler3acc69a2020-10-13 08:20:51 -040011#include "include/core/SkTraceMemoryDump.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050012#include "include/gpu/GrContextThreadSafeProxy.h"
Adlai Holler9555f292020-10-09 09:41:14 -040013#include "src/core/SkTaskGroup.h"
14#include "src/gpu/GrClientMappedBufferManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050015#include "src/gpu/GrContextThreadSafeProxyPriv.h"
Adlai Hollera0693042020-10-14 11:23:11 -040016#include "src/gpu/GrDirectContextPriv.h"
Adlai Holler4aa4c602020-10-12 13:58:52 -040017#include "src/gpu/GrDrawingManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/gpu/GrGpu.h"
Adlai Holler9555f292020-10-09 09:41:14 -040019#include "src/gpu/GrResourceProvider.h"
20#include "src/gpu/GrShaderUtils.h"
Adlai Holler3acc69a2020-10-13 08:20:51 -040021#include "src/image/SkImage_GpuBase.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050022
Adlai Holler4aa4c602020-10-12 13:58:52 -040023#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050024#include "src/gpu/effects/GrSkSLFP.h"
25#include "src/gpu/gl/GrGLGpu.h"
26#include "src/gpu/mock/GrMockGpu.h"
Robert Phillips5edf5102020-08-10 16:30:36 -040027#include "src/gpu/ops/GrSmallPathAtlasMgr.h"
Robert Phillipse19babf2020-04-06 13:57:30 -040028#include "src/gpu/text/GrAtlasManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050029#include "src/gpu/text/GrStrikeCache.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050030#ifdef SK_METAL
Jim Van Verth351c9b52020-11-12 15:21:11 -050031#include "include/gpu/mtl/GrMtlBackendContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050032#include "src/gpu/mtl/GrMtlTrampoline.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050033#endif
34#ifdef SK_VULKAN
Mike Kleinc0bd9f92019-04-23 12:05:21 -050035#include "src/gpu/vk/GrVkGpu.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050036#endif
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -050037#ifdef SK_DIRECT3D
38#include "src/gpu/d3d/GrD3DGpu.h"
39#endif
Stephen White985741a2019-07-18 11:43:45 -040040#ifdef SK_DAWN
Mike Klein52337de2019-07-25 09:00:52 -050041#include "src/gpu/dawn/GrDawnGpu.h"
Stephen White985741a2019-07-18 11:43:45 -040042#endif
Adlai Holler6d0745b2020-10-13 13:29:00 -040043#include <memory>
Robert Phillipsa3457b82018-03-08 11:30:12 -050044
Brian Salomon24069eb2020-06-24 10:19:52 -040045#if GR_TEST_UTILS
46# include "include/utils/SkRandom.h"
47# if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
48# include <sanitizer/lsan_interface.h>
49# endif
50#endif
51
Adlai Holler9555f292020-10-09 09:41:14 -040052#define ASSERT_SINGLE_OWNER GR_ASSERT_SINGLE_OWNER(this->singleOwner())
53
Robert Phillipse7a959d2021-03-11 14:44:42 -050054GrDirectContext::DirectContextID GrDirectContext::DirectContextID::Next() {
Robert Phillipsedff4672021-03-11 09:16:25 -050055 static std::atomic<uint32_t> nextID{1};
56 uint32_t id;
57 do {
58 id = nextID.fetch_add(1, std::memory_order_relaxed);
59 } while (id == SK_InvalidUniqueID);
60 return DirectContextID(id);
61}
62
// Constructs a direct context for 'backend'. Shared state lives in the
// GrContext base (INHERITED), which is handed a freshly-made thread-safe
// proxy; the per-context unique ID is minted here. Real setup happens in
// init(), not in this constructor.
GrDirectContext::GrDirectContext(GrBackendApi backend, const GrContextOptions& options)
        : INHERITED(GrContextThreadSafeProxyPriv::Make(backend, options))
        , fDirectContextID(DirectContextID::Next()) {
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050067
// Tears the context down in a strict order: flush pending work, wait for the
// GPU to finish, then release caches. The ordering comments below are
// load-bearing — do not reorder these statements.
GrDirectContext::~GrDirectContext() {
    ASSERT_SINGLE_OWNER
    // this if-test protects against the case where the context is being destroyed
    // before having been fully created
    if (fGpu) {
        this->flushAndSubmit();
    }

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/false);

    this->destroyDrawingManager();

    // Ideally we could just let the ptr drop, but resource cache queries this ptr in releaseAll.
    if (fResourceCache) {
        fResourceCache->releaseAll();
    }
    // This has to be after GrResourceCache::releaseAll so that other threads that are holding
    // async pixel result don't try to destroy buffers off thread.
    fMappedBufferManager.reset();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050089
// Public re-export of the base-class accessor: returns the proxy object that
// can be shared across threads (e.g. for DDL recording).
sk_sp<GrContextThreadSafeProxy> GrDirectContext::threadSafeProxy() {
    return INHERITED::threadSafeProxy();
}
93
Adlai Hollera7a40442020-10-09 09:49:42 -040094void GrDirectContext::resetGLTextureBindings() {
95 if (this->abandoned() || this->backend() != GrBackendApi::kOpenGL) {
96 return;
97 }
98 fGpu->resetTextureBindings();
99}
100
// Marks portions of the backend 3D context state dirty ('state' is a bitmask)
// so Skia re-sends that state before its next use of the API.
void GrDirectContext::resetContext(uint32_t state) {
    ASSERT_SINGLE_OWNER
    fGpu->markContextDirty(state);
}
105
// Abandons the context: the backend 3D API is assumed unusable, so resources
// are dropped WITHOUT freeing them in the API (contrast with
// releaseResourcesAndAbandonContext). Statement order matters — see the
// inline notes.
void GrDirectContext::abandonContext() {
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(this->caps()->mustSyncGpuDuringAbandon());

    fStrikeCache->freeAll();

    fMappedBufferManager->abandon();

    fResourceProvider->abandon();

    // abandon first so destructors don't try to free the resources in the API.
    fResourceCache->abandonAll();

    fGpu->disconnect(GrGpu::DisconnectType::kAbandon);

    // Must be after GrResourceCache::abandonAll().
    fMappedBufferManager.reset();

    // fSmallPathAtlasMgr is created lazily, so it may be null here.
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500135
Adlai Hollera7a40442020-10-09 09:49:42 -0400136bool GrDirectContext::abandoned() {
137 if (INHERITED::abandoned()) {
138 return true;
139 }
140
141 if (fGpu && fGpu->isDeviceLost()) {
142 this->abandonContext();
143 return true;
144 }
145 return false;
146}
147
Adlai Holler61a591c2020-10-12 12:38:33 -0400148bool GrDirectContext::oomed() { return fGpu ? fGpu->checkAndResetOOMed() : false; }
149
// Like abandonContext(), but the backend 3D API is still usable, so GPU
// resources ARE freed in the API before the context is abandoned.
void GrDirectContext::releaseResourcesAndAbandonContext() {
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/true);

    fResourceProvider->abandon();

    // Release all resources in the backend 3D API.
    fResourceCache->releaseAll();

    // Must be after GrResourceCache::releaseAll().
    fMappedBufferManager.reset();

    fGpu->disconnect(GrGpu::DisconnectType::kCleanup);
    // fSmallPathAtlasMgr is created lazily, so it may be null here.
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillips6db27c22019-05-01 10:43:56 -0400174
// Frees all GPU resources that can be recreated on demand: flushes pending
// work first so nothing in flight references what is about to be purged, then
// empties the atlases, glyph cache, drawing-manager resources, and finally
// purges everything unlocked from the resource cache.
void GrDirectContext::freeGpuResources() {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->flushAndSubmit();
    // fSmallPathAtlasMgr is created lazily, so it may be null here.
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();

    // TODO: the glyph cache doesn't hold any GpuResources so this call should not be needed here.
    // Some slack in the GrTextBlob's implementation requires it though. That could be fixed.
    fStrikeCache->freeAll();

    this->drawingManager()->freeGpuResources();

    fResourceCache->purgeAllUnlocked();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500196
// Second-phase initialization (the constructor only wires up the base class).
// Creates the caches, providers, and atlas manager. Returns false if there is
// no GPU object or if base-class init fails; the destructor tolerates a
// partially-initialized context in that case.
bool GrDirectContext::init() {
    ASSERT_SINGLE_OWNER
    if (!fGpu) {
        return false;
    }

    // The thread-safe proxy must learn the caps/pipeline builder before
    // INHERITED::init(), which relies on them.
    fThreadSafeProxy->priv().init(fGpu->refCaps(), fGpu->refPipelineBuilder());
    if (!INHERITED::init()) {
        return false;
    }

    SkASSERT(this->getTextBlobCache());
    SkASSERT(this->threadSafeCache());

    fStrikeCache = std::make_unique<GrStrikeCache>();
    fResourceCache = std::make_unique<GrResourceCache>(this->singleOwner(), this->contextID());
    fResourceCache->setProxyProvider(this->proxyProvider());
    fResourceCache->setThreadSafeCache(this->threadSafeCache());
    fResourceProvider = std::make_unique<GrResourceProvider>(fGpu.get(), fResourceCache.get(),
                                                             this->singleOwner());
    fMappedBufferManager = std::make_unique<GrClientMappedBufferManager>(this->directContextID());

    fDidTestPMConversions = false;

    // DDL TODO: we need to think through how the task group & persistent cache
    // get passed on to/shared between all the DDLRecorders created with this context.
    if (this->options().fExecutor) {
        fTaskGroup = std::make_unique<SkTaskGroup>(*this->options().fExecutor);
    }

    fPersistentCache = this->options().fPersistentCache;
    fShaderErrorHandler = this->options().fShaderErrorHandler;
    if (!fShaderErrorHandler) {
        fShaderErrorHandler = GrShaderUtils::DefaultShaderErrorHandler();
    }

    GrDrawOpAtlas::AllowMultitexturing allowMultitexturing;
    if (GrContextOptions::Enable::kNo == this->options().fAllowMultipleGlyphCacheTextures ||
        // multitexturing supported only if range can represent the index + texcoords fully
        !(this->caps()->shaderCaps()->floatIs32Bits() ||
          this->caps()->shaderCaps()->integerSupport())) {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kNo;
    } else {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kYes;
    }

    GrProxyProvider* proxyProvider = this->priv().proxyProvider();

    fAtlasManager = std::make_unique<GrAtlasManager>(proxyProvider,
                                                     this->options().fGlyphCacheTextureMaximumBytes,
                                                     allowMultitexturing);
    // The atlas manager must be flushed alongside everything else.
    this->priv().addOnFlushCallbackObject(fAtlasManager.get());

    return true;
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500252
Adlai Holler3a508e92020-10-12 13:58:01 -0400253void GrDirectContext::getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const {
254 ASSERT_SINGLE_OWNER
255
256 if (resourceCount) {
257 *resourceCount = fResourceCache->getBudgetedResourceCount();
258 }
259 if (resourceBytes) {
260 *resourceBytes = fResourceCache->getBudgetedResourceBytes();
261 }
262}
263
// Returns how many bytes of cached GPU resources are currently purgeable
// (unreferenced and eligible for eviction).
size_t GrDirectContext::getResourceCachePurgeableBytes() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getPurgeableBytes();
}
268
269void GrDirectContext::getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const {
270 ASSERT_SINGLE_OWNER
271 if (maxResources) {
272 *maxResources = -1;
273 }
274 if (maxResourceBytes) {
275 *maxResourceBytes = this->getResourceCacheLimit();
276 }
277}
278
// Returns the resource cache's byte budget.
size_t GrDirectContext::getResourceCacheLimit() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getMaxResourceBytes();
}
283
// Legacy setter kept for API compatibility: the resource-count limit ('unused')
// is ignored; only the byte budget is applied.
void GrDirectContext::setResourceCacheLimits(int unused, size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    this->setResourceCacheLimit(maxResourceBytes);
}
288
// Sets the resource cache's byte budget. The cache is responsible for any
// purging needed to meet the new limit.
void GrDirectContext::setResourceCacheLimit(size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    fResourceCache->setLimit(maxResourceBytes);
}
293
// Purges unlocked resources from the cache; with scratchResourcesOnly set,
// only reusable scratch resources are dropped. No-op when abandoned.
void GrDirectContext::purgeUnlockedResources(bool scratchResourcesOnly) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(scratchResourcesOnly);
    fResourceCache->purgeAsNeeded();

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
308
// Housekeeping entry point intended to be called periodically: completes any
// finished async work, services mapped-buffer transfers, and evicts resources
// that have gone unused for at least 'msNotUsed'.
void GrDirectContext::performDeferredCleanup(std::chrono::milliseconds msNotUsed) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->checkAsyncWorkCompletion();
    fMappedBufferManager->process();
    // Anything last used before this time point is eligible for eviction.
    auto purgeTime = GrStdSteadyClock::now() - msNotUsed;

    fResourceCache->purgeAsNeeded();
    fResourceCache->purgeResourcesNotUsedSince(purgeTime);

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
329
// Purges unlocked resources until roughly 'bytesToPurge' bytes are freed,
// preferring scratch resources when requested. No-op when abandoned.
void GrDirectContext::purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(bytesToPurge, preferScratchResources);
}
339
Adlai Holler3acc69a2020-10-13 08:20:51 -0400340////////////////////////////////////////////////////////////////////////////////
341bool GrDirectContext::wait(int numSemaphores, const GrBackendSemaphore waitSemaphores[],
342 bool deleteSemaphoresAfterWait) {
343 if (!fGpu || fGpu->caps()->semaphoreSupport()) {
344 return false;
345 }
346 GrWrapOwnership ownership =
347 deleteSemaphoresAfterWait ? kAdopt_GrWrapOwnership : kBorrow_GrWrapOwnership;
348 for (int i = 0; i < numSemaphores; ++i) {
349 std::unique_ptr<GrSemaphore> sema = fResourceProvider->wrapBackendSemaphore(
350 waitSemaphores[i], GrResourceProvider::SemaphoreWrapType::kWillWait, ownership);
351 // If we failed to wrap the semaphore it means the client didn't give us a valid semaphore
352 // to begin with. Therefore, it is fine to not wait on it.
353 if (sema) {
354 fGpu->waitSemaphore(sema.get());
355 }
356 }
357 return true;
358}
Adlai Holler4aa4c602020-10-12 13:58:52 -0400359
// Lazily creates the small-path atlas manager on first use (registering it as
// an on-flush callback), then (re)initializes its atlas. Returns null if the
// atlas cannot be initialized.
GrSmallPathAtlasMgr* GrDirectContext::onGetSmallPathAtlasMgr() {
    if (!fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr = std::make_unique<GrSmallPathAtlasMgr>();

        this->priv().addOnFlushCallbackObject(fSmallPathAtlasMgr.get());
    }

    if (!fSmallPathAtlasMgr->initAtlas(this->proxyProvider(), this->caps())) {
        return nullptr;
    }

    return fSmallPathAtlasMgr.get();
}
373
Adlai Holler3acc69a2020-10-13 08:20:51 -0400374////////////////////////////////////////////////////////////////////////////////
375
// Flushes all recorded work to the drawing manager. If the context is
// abandoned, the client's finished/submitted callbacks are still invoked
// (submitted with success=false) so callers never leak callback contexts.
GrSemaphoresSubmitted GrDirectContext::flush(const GrFlushInfo& info) {
    ASSERT_SINGLE_OWNER
    if (this->abandoned()) {
        if (info.fFinishedProc) {
            info.fFinishedProc(info.fFinishedContext);
        }
        if (info.fSubmittedProc) {
            info.fSubmittedProc(info.fSubmittedContext, false);
        }
        return GrSemaphoresSubmitted::kNo;
    }

    // Empty proxy list == flush everything; no surface access needed.
    return this->drawingManager()->flushSurfaces({}, SkSurface::BackendSurfaceAccess::kNoAccess,
                                                 info, nullptr);
}
391
392bool GrDirectContext::submit(bool syncCpu) {
393 ASSERT_SINGLE_OWNER
394 if (this->abandoned()) {
395 return false;
396 }
397
398 if (!fGpu) {
399 return false;
400 }
401
402 return fGpu->submitToGpu(syncCpu);
403}
404
405////////////////////////////////////////////////////////////////////////////////
406
// Polls the GPU for completed work and fires any client "finished" callbacks
// that are now ready. Safe to call with no GPU object.
void GrDirectContext::checkAsyncWorkCompletion() {
    if (fGpu) {
        fGpu->checkFinishProcs();
    }
}
412
Greg Daniela89b4302021-01-29 10:48:40 -0500413void GrDirectContext::syncAllOutstandingGpuWork(bool shouldExecuteWhileAbandoned) {
414 if (fGpu && (!this->abandoned() || shouldExecuteWhileAbandoned)) {
415 fGpu->finishOutstandingGpuWork();
416 this->checkAsyncWorkCompletion();
417 }
418}
419
Adlai Holler3acc69a2020-10-13 08:20:51 -0400420////////////////////////////////////////////////////////////////////////////////
421
// Asks the GPU to persist its Vulkan pipeline-cache data (via the client's
// persistent cache). Presumably a no-op on non-Vulkan backends — the
// dispatch happens inside GrGpu. Safe with no GPU object.
void GrDirectContext::storeVkPipelineCacheData() {
    if (fGpu) {
        fGpu->storeVkPipelineCacheData();
    }
}
427
428////////////////////////////////////////////////////////////////////////////////
429
// True when the backend's shader capabilities allow distance-field text
// rendering.
bool GrDirectContext::supportsDistanceFieldText() const {
    return this->caps()->shaderCaps()->supportsDistanceFieldText();
}
433
434//////////////////////////////////////////////////////////////////////////////
435
// Feeds resource-cache and text-blob-cache memory figures into the client's
// SkTraceMemoryDump for heap-profiling integrations.
void GrDirectContext::dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const {
    ASSERT_SINGLE_OWNER
    fResourceCache->dumpMemoryStatistics(traceMemoryDump);
    traceMemoryDump->dumpNumericValue("skia/gr_text_blob_cache", "size", "bytes",
                                      this->getTextBlobCache()->usedBytes());
}
442
// Creates an UNINITIALIZED backend texture of the given format/size. Returns
// an invalid GrBackendTexture when the context is abandoned or creation fails.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    return fGpu->createBackendTexture({width, height}, backendFormat, renderable,
                                      mipMapped, isProtected);
}
456
// Convenience overload: maps the SkColorType to this context's default backend
// format, then delegates to the format-based overload above.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    const GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);

    return this->createBackendTexture(width, height, format, mipMapped, renderable, isProtected);
}
470
471static GrBackendTexture create_and_update_backend_texture(
472 GrDirectContext* dContext,
473 SkISize dimensions,
474 const GrBackendFormat& backendFormat,
475 GrMipmapped mipMapped,
476 GrRenderable renderable,
477 GrProtected isProtected,
478 sk_sp<GrRefCntedCallback> finishedCallback,
479 const GrGpu::BackendTextureData* data) {
480 GrGpu* gpu = dContext->priv().getGpu();
481
482 GrBackendTexture beTex = gpu->createBackendTexture(dimensions, backendFormat, renderable,
483 mipMapped, isProtected);
484 if (!beTex.isValid()) {
485 return {};
486 }
487
488 if (!dContext->priv().getGpu()->updateBackendTexture(beTex,
489 std::move(finishedCallback),
490 data)) {
491 dContext->deleteBackendTexture(beTex);
492 return {};
493 }
494 return beTex;
495}
496
// Uploads a full mip chain of pixmaps into 'backendTexture'. Levels that need
// massaging — a bottom-left origin (must be flipped) or non-tight rows when
// the backend can't handle arbitrary row bytes — are first converted into one
// shared temp buffer; conforming levels are passed through untouched. Two
// passes over the levels: first to size the buffer, then to fill it. Note the
// second pass must visit levels in the same order so the running 'size' offset
// lines up with the first pass.
static bool update_texture_with_pixmaps(GrGpu* gpu,
                                        const SkPixmap* srcData,
                                        int numLevels,
                                        const GrBackendTexture& backendTexture,
                                        GrSurfaceOrigin textureOrigin,
                                        sk_sp<GrRefCntedCallback> finishedCallback) {
    bool flip = textureOrigin == kBottomLeft_GrSurfaceOrigin;
    bool mustBeTight = !gpu->caps()->writePixelsRowBytesSupport();

    // Pass 1: total bytes of temp storage needed for levels requiring
    // conversion (flipped and/or re-tightened).
    size_t size = 0;
    for (int i = 0; i < numLevels; ++i) {
        size_t minRowBytes = srcData[i].info().minRowBytes();
        if (flip || (mustBeTight && srcData[i].rowBytes() != minRowBytes)) {
            size += minRowBytes * srcData[i].height();
        }
    }

    std::unique_ptr<char[]> tempStorage;
    if (size) {
        tempStorage.reset(new char[size]);
    }
    // Pass 2: 'size' is reused as a running offset into tempStorage.
    size = 0;
    SkAutoSTArray<15, GrPixmap> tempPixmaps(numLevels);
    for (int i = 0; i < numLevels; ++i) {
        size_t minRowBytes = srcData[i].info().minRowBytes();
        if (flip || (mustBeTight && srcData[i].rowBytes() != minRowBytes)) {
            tempPixmaps[i] = {srcData[i].info(), tempStorage.get() + size, minRowBytes};
            SkAssertResult(GrConvertPixels(tempPixmaps[i], srcData[i], flip));
            size += minRowBytes*srcData[i].height();
        } else {
            // Level already acceptable; upload directly from the client's pixels.
            tempPixmaps[i] = srcData[i];
        }
    }

    GrGpu::BackendTextureData data(tempPixmaps.get());
    return gpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
534
// Creates a backend texture cleared to 'color'. The finished-proc is wrapped
// BEFORE the abandoned check so the client's callback/context pair is always
// released (via the ref-counted wrapper's destructor) even on early-out.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_backend_texture(this, {width, height},
                                             backendFormat, mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
555
// Creates a backend texture cleared to 'color', choosing the backend format
// from the SkColorType. The clear color is pre-swizzled with the format's
// write swizzle so the stored channel order matches what the color type
// expects.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    // Wrap first so the client's callback fires even on early-out.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
    if (!format.isValid()) {
        return {};
    }

    GrColorType grColorType = SkColorTypeToGrColorType(skColorType);
    SkColor4f swizzledColor = this->caps()->getWriteSwizzle(format, grColorType).applyTo(color);

    GrGpu::BackendTextureData data(swizzledColor);
    return create_and_update_backend_texture(this, {width, height}, format,
                                             mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
583
// Creates a backend texture initialized from client pixmaps. 'srcData' must
// supply either exactly one level or a full mip chain for the base dimensions;
// anything else is rejected. The texture is deleted again if the pixel upload
// fails, so no backend resource leaks on the error path.
GrBackendTexture GrDirectContext::createBackendTexture(const SkPixmap srcData[],
                                                       int numProvidedLevels,
                                                       GrSurfaceOrigin textureOrigin,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    // Wrap first so the client's callback fires even on early-out.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    if (!srcData || numProvidedLevels <= 0) {
        return {};
    }

    // Level 0 defines the base dimensions and color type.
    int baseWidth = srcData[0].width();
    int baseHeight = srcData[0].height();
    SkColorType colorType = srcData[0].colorType();

    GrMipmapped mipMapped = GrMipmapped::kNo;
    int numExpectedLevels = 1;
    if (numProvidedLevels > 1) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(baseWidth, baseHeight) + 1;
        mipMapped = GrMipmapped::kYes;
    }

    // Reject partial mip chains.
    if (numProvidedLevels != numExpectedLevels) {
        return {};
    }

    GrBackendFormat backendFormat = this->defaultBackendFormat(colorType, renderable);
    GrBackendTexture beTex = this->createBackendTexture(srcData[0].width(),
                                                        srcData[0].height(),
                                                        backendFormat,
                                                        mipMapped,
                                                        renderable,
                                                        isProtected);
    if (!beTex.isValid()) {
        return {};
    }
    if (!update_texture_with_pixmaps(this->priv().getGpu(),
                                     srcData,
                                     numProvidedLevels,
                                     beTex,
                                     textureOrigin,
                                     std::move(finishedCallback))) {
        this->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
639
// Overwrites an existing backend texture with a solid color. The color is used
// as-is (no swizzle); see the SkColorType overload for swizzled writes.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    // Wrap first so the client's callback fires even on early-out.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrGpu::BackendTextureData data(color);
    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
653
// Overwrites an existing backend texture with a solid color, interpreting the
// texture through 'skColorType'. Fails if the color type is incompatible with
// the texture's format; otherwise the color is pre-swizzled with the format's
// write swizzle before upload.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           SkColorType skColorType,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    // Wrap first so the client's callback fires even on early-out.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrBackendFormat format = backendTexture.getBackendFormat();
    GrColorType grColorType = SkColorTypeAndFormatToGrColorType(this->caps(), skColorType, format);

    if (!this->caps()->areColorTypeAndFormatCompatible(grColorType, format)) {
        return false;
    }

    GrSwizzle swizzle = this->caps()->getWriteSwizzle(format, grColorType);
    GrGpu::BackendTextureData data(swizzle.applyTo(color));

    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
677
// Overwrites an existing backend texture from client pixmaps. 'numLevels' must
// be 1 for a non-mipmapped texture or the full mip count for a mipmapped one;
// partial updates are rejected.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkPixmap srcData[],
                                           int numLevels,
                                           GrSurfaceOrigin textureOrigin,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    // Wrap first so the client's callback fires even on early-out.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    if (!srcData || numLevels <= 0) {
        return false;
    }

    int numExpectedLevels = 1;
    if (backendTexture.hasMipmaps()) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(backendTexture.width(),
                                                        backendTexture.height()) + 1;
    }
    if (numLevels != numExpectedLevels) {
        return false;
    }
    return update_texture_with_pixmaps(fGpu.get(),
                                       srcData,
                                       numLevels,
                                       backendTexture,
                                       textureOrigin,
                                       std::move(finishedCallback));
}
709
Adlai Holler64e13832020-10-13 08:21:56 -0400710//////////////////////////////////////////////////////////////////////////////
711
// Compressed-format counterpart of create_and_update_backend_texture: creates
// an uninitialized compressed texture then uploads 'data'. On failure the
// partially-created texture is deleted and an invalid handle returned.
static GrBackendTexture create_and_update_compressed_backend_texture(
        GrDirectContext* dContext,
        SkISize dimensions,
        const GrBackendFormat& backendFormat,
        GrMipmapped mipMapped,
        GrProtected isProtected,
        sk_sp<GrRefCntedCallback> finishedCallback,
        const GrGpu::BackendTextureData* data) {
    GrGpu* gpu = dContext->priv().getGpu();

    GrBackendTexture beTex = gpu->createCompressedBackendTexture(dimensions, backendFormat,
                                                                 mipMapped, isProtected);
    if (!beTex.isValid()) {
        return {};
    }

    if (!dContext->priv().getGpu()->updateCompressedBackendTexture(
                beTex, std::move(finishedCallback), data)) {
        // Upload failed; don't leak the backend resource.
        dContext->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
735
// Creates a compressed backend texture filled from a solid color (the backend
// encodes the color into the compressed format).
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const SkColor4f& color,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    // Wrap first so the client's callback fires even on early-out.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}
755
// Convenience overload: maps the SkImage compression type to a backend format,
// then delegates to the format-based color overload.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 SkImage::CompressionType compression,
                                                                 const SkColor4f& color,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    GrBackendFormat format = this->compressedBackendFormat(compression);
    return this->createCompressedBackendTexture(width, height, format, color,
                                                mipMapped, isProtected, finishedProc,
                                                finishedContext);
}
769
// Creates a compressed backend texture initialized from raw, pre-compressed
// pixel data ('compressedData'/'dataSize' must already be in 'backendFormat').
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const void* compressedData,
                                                                 size_t dataSize,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    // Wrap first so the client's callback fires even on early-out.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(compressedData, dataSize);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}
790
791GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
792 SkImage::CompressionType compression,
793 const void* data, size_t dataSize,
794 GrMipmapped mipMapped,
795 GrProtected isProtected,
796 GrGpuFinishedProc finishedProc,
797 GrGpuFinishedContext finishedContext) {
798 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
799 GrBackendFormat format = this->compressedBackendFormat(compression);
800 return this->createCompressedBackendTexture(width, height, format, data, dataSize, mipMapped,
801 isProtected, finishedProc, finishedContext);
802}
803
// Overwrites the contents of an existing compressed backend texture with a solid color.
// Returns false if the context has been abandoned; otherwise returns the GrGpu's result.
bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
                                                     const SkColor4f& color,
                                                     GrGpuFinishedProc finishedProc,
                                                     GrGpuFinishedContext finishedContext) {
    // NOTE: created before the abandoned() check — presumably so finishedProc still fires
    // (on callback destruction) when we return false early; confirm with GrRefCntedCallback.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrGpu::BackendTextureData data(color);
    return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
817
// Overwrites the contents of an existing compressed backend texture with client-supplied
// compressed pixel data. Returns false if the context has been abandoned or if
// compressedData is null; otherwise returns the GrGpu's result.
bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
                                                     const void* compressedData,
                                                     size_t dataSize,
                                                     GrGpuFinishedProc finishedProc,
                                                     GrGpuFinishedContext finishedContext) {
    // NOTE: created before the validity checks — presumably so finishedProc still fires
    // (on callback destruction) on every early-out; confirm with GrRefCntedCallback.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    // A null data pointer is rejected up front rather than handed to the backend.
    if (!compressedData) {
        return false;
    }

    GrGpu::BackendTextureData data(compressedData, dataSize);

    return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
837
Adlai Holler6d0745b2020-10-13 13:29:00 -0400838//////////////////////////////////////////////////////////////////////////////
839
// Transitions a backend texture to the requested mutable state (e.g. image layout /
// queue ownership on backends that track such state). If previousState is non-null it
// receives the texture's prior state — exact semantics live in GrGpu::setBackendTextureState.
// Returns false if the context has been abandoned.
bool GrDirectContext::setBackendTextureState(const GrBackendTexture& backendTexture,
                                             const GrBackendSurfaceMutableState& state,
                                             GrBackendSurfaceMutableState* previousState,
                                             GrGpuFinishedProc finishedProc,
                                             GrGpuFinishedContext finishedContext) {
    // NOTE: created before the abandoned() check — presumably so finishedProc still fires
    // (on callback destruction) on the early-out; confirm with GrRefCntedCallback.
    auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    return fGpu->setBackendTextureState(backendTexture, state, previousState, std::move(callback));
}
853
854
// Render-target counterpart of setBackendTextureState above: transitions a backend
// render target to the requested mutable state, optionally returning the prior state via
// previousState. Returns false if the context has been abandoned.
bool GrDirectContext::setBackendRenderTargetState(const GrBackendRenderTarget& backendRenderTarget,
                                                  const GrBackendSurfaceMutableState& state,
                                                  GrBackendSurfaceMutableState* previousState,
                                                  GrGpuFinishedProc finishedProc,
                                                  GrGpuFinishedContext finishedContext) {
    // NOTE: created before the abandoned() check — presumably so finishedProc still fires
    // (on callback destruction) on the early-out; confirm with GrRefCntedCallback.
    auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    return fGpu->setBackendRenderTargetState(backendRenderTarget, state, previousState,
                                             std::move(callback));
}
869
870void GrDirectContext::deleteBackendTexture(GrBackendTexture backendTex) {
871 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
872 // For the Vulkan backend we still must destroy the backend texture when the context is
873 // abandoned.
874 if ((this->abandoned() && this->backend() != GrBackendApi::kVulkan) || !backendTex.isValid()) {
875 return;
876 }
877
878 fGpu->deleteBackendTexture(backendTex);
879}
880
881//////////////////////////////////////////////////////////////////////////////
882
// Hands a previously captured (key, data) shader pair to the backend GrGpu so it can
// warm its shader/pipeline cache ahead of first use; returns the backend's result.
// NOTE(review): there is no abandoned() guard here, unlike the other entry points in
// this file — presumably callers must not invoke this after abandonment; confirm.
bool GrDirectContext::precompileShader(const SkData& key, const SkData& data) {
    return fGpu->precompileShader(key, data);
}
886
887#ifdef SK_ENABLE_DUMP_GPU
888#include "include/core/SkString.h"
889#include "src/utils/SkJSONWriter.h"
// Serializes the context's state (backend, caps, gpu, context) as a pretty-printed JSON
// object and returns it as an SkString. Only compiled when SK_ENABLE_DUMP_GPU is defined.
SkString GrDirectContext::dump() const {
    SkDynamicMemoryWStream stream;
    SkJSONWriter writer(&stream, SkJSONWriter::Mode::kPretty);
    writer.beginObject();

    writer.appendString("backend", GrBackendApiToStr(this->backend()));

    writer.appendName("caps");
    this->caps()->dumpJSON(&writer);

    writer.appendName("gpu");
    this->fGpu->dumpJSON(&writer);

    writer.appendName("context");
    this->dumpJSON(&writer);

    // Flush JSON to the memory stream
    writer.endObject();
    writer.flush();

    // Null terminate the JSON data in the memory stream
    stream.write8(0);

    // Allocate a string big enough to hold all the data, then copy out of the stream.
    // bytesWritten() is sampled *after* the NUL was written, so the terminator is
    // included in the copied bytes.
    SkString result(stream.bytesWritten());
    stream.copyToAndReset(result.writable_str());
    return result;
}
918#endif
919
John Rosascoa9b348f2019-11-08 13:18:15 -0800920#ifdef SK_GL
Robert Phillipsc7228c62020-07-14 12:57:39 -0400921
Robert Phillipsf4f80112020-07-13 16:13:31 -0400922/*************************************************************************************************/
923sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500924 GrContextOptions defaultOptions;
Jim Van Verth03b8ab22020-02-24 11:36:15 -0500925 return MakeGL(std::move(glInterface), defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -0500926}
927
Robert Phillipsf4f80112020-07-13 16:13:31 -0400928sk_sp<GrDirectContext> GrDirectContext::MakeGL(const GrContextOptions& options) {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400929 return MakeGL(nullptr, options);
930}
931
Robert Phillipsf4f80112020-07-13 16:13:31 -0400932sk_sp<GrDirectContext> GrDirectContext::MakeGL() {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400933 GrContextOptions defaultOptions;
934 return MakeGL(nullptr, defaultOptions);
935}
936
Brian Salomon24069eb2020-06-24 10:19:52 -0400937#if GR_TEST_UTILS
// Test-only (GR_TEST_UTILS): wraps a glGetError implementation so that roughly 1 in 300
// otherwise-successful calls synthetically reports GR_GL_OUT_OF_MEMORY, exercising OOM
// handling paths.
// NOTE(review): this free function has external linkage; consider `static` or an
// anonymous namespace to avoid polluting the global symbol table — confirm no other TU
// relies on it.
GrGLFunction<GrGLGetErrorFn> make_get_error_with_random_oom(GrGLFunction<GrGLGetErrorFn> original) {
    // A SkRandom and a GrGLFunction<GrGLGetErrorFn> are too big to be captured by a
    // GrGLFunction<GrGLGetError> (surprise, surprise). So we make a context object and
    // capture that by pointer. However, GrGLFunction doesn't support calling a destructor
    // on the thing it captures. So we leak the context.
    struct GetErrorContext {
        SkRandom fRandom;
        GrGLFunction<GrGLGetErrorFn> fGetError;
    };

    auto errorContext = new GetErrorContext;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
    // Tell LeakSanitizer the leak above is intentional.
    __lsan_ignore_object(errorContext);
#endif

    errorContext->fGetError = original;

    return GrGLFunction<GrGLGetErrorFn>([errorContext]() {
        GrGLenum error = errorContext->fGetError();
        if (error == GR_GL_NO_ERROR && (errorContext->fRandom.nextU() % 300) == 0) {
            error = GR_GL_OUT_OF_MEMORY;
        }
        return error;
    });
}
964#endif
965
Robert Phillipsf4f80112020-07-13 16:13:31 -0400966sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface,
967 const GrContextOptions& options) {
968 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kOpenGL, options));
Brian Salomon24069eb2020-06-24 10:19:52 -0400969#if GR_TEST_UTILS
970 if (options.fRandomGLOOM) {
971 auto copy = sk_make_sp<GrGLInterface>(*glInterface);
972 copy->fFunctions.fGetError =
973 make_get_error_with_random_oom(glInterface->fFunctions.fGetError);
974#if GR_GL_CHECK_ERROR
975 // Suppress logging GL errors since we'll be synthetically generating them.
976 copy->suppressErrorLogging();
977#endif
978 glInterface = std::move(copy);
979 }
980#endif
Robert Phillipsf4f80112020-07-13 16:13:31 -0400981 direct->fGpu = GrGLGpu::Make(std::move(glInterface), options, direct.get());
982 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500983 return nullptr;
984 }
Robert Phillipsf4f80112020-07-13 16:13:31 -0400985 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -0500986}
John Rosascoa9b348f2019-11-08 13:18:15 -0800987#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -0500988
Robert Phillipsf4f80112020-07-13 16:13:31 -0400989/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -0400990sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions) {
991 GrContextOptions defaultOptions;
992 return MakeMock(mockOptions, defaultOptions);
993}
994
// Primary mock-backend factory: creates the context shell, then the GrMockGpu (which
// holds a back-pointer to the context), then runs two-phase init. Returns nullptr if
// initialization fails.
sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions,
                                                 const GrContextOptions& options) {
    sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMock, options));

    direct->fGpu = GrMockGpu::Make(mockOptions, options, direct.get());
    if (!direct->init()) {
        return nullptr;
    }

    return direct;
}
1006
Greg Danielb4d89562018-10-03 18:44:49 +00001007#ifdef SK_VULKAN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001008/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001009sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext) {
1010 GrContextOptions defaultOptions;
1011 return MakeVulkan(backendContext, defaultOptions);
1012}
1013
// Primary Vulkan factory: creates the context shell, then the GrVkGpu (which holds a
// back-pointer to the context), then runs two-phase init. Returns nullptr if
// initialization fails.
sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext,
                                                   const GrContextOptions& options) {
    sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kVulkan, options));

    direct->fGpu = GrVkGpu::Make(backendContext, options, direct.get());
    if (!direct->init()) {
        return nullptr;
    }

    return direct;
}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001025#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001026
1027#ifdef SK_METAL
Robert Phillipsf4f80112020-07-13 16:13:31 -04001028/*************************************************************************************************/
Jim Van Verth351c9b52020-11-12 15:21:11 -05001029sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001030 GrContextOptions defaultOptions;
Jim Van Verth351c9b52020-11-12 15:21:11 -05001031 return MakeMetal(backendContext, defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -05001032}
1033
Jim Van Verth351c9b52020-11-12 15:21:11 -05001034sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext,
1035 const GrContextOptions& options) {
Robert Phillipsf4f80112020-07-13 16:13:31 -04001036 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
Robert Phillipsa3457b82018-03-08 11:30:12 -05001037
Jim Van Verth351c9b52020-11-12 15:21:11 -05001038 direct->fGpu = GrMtlTrampoline::MakeGpu(backendContext, options, direct.get());
Robert Phillipsf4f80112020-07-13 16:13:31 -04001039 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001040 return nullptr;
1041 }
Timothy Liang4e85e802018-06-28 16:37:18 -04001042
Robert Phillipsf4f80112020-07-13 16:13:31 -04001043 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001044}
Jim Van Verth351c9b52020-11-12 15:21:11 -05001045
1046// deprecated
1047sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue) {
1048 GrContextOptions defaultOptions;
1049 return MakeMetal(device, queue, defaultOptions);
1050}
1051
1052// deprecated
1053// remove include/gpu/mtl/GrMtlBackendContext.h, above, when removed
1054sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue,
1055 const GrContextOptions& options) {
1056 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
1057 GrMtlBackendContext backendContext = {};
1058 backendContext.fDevice.reset(device);
1059 backendContext.fQueue.reset(queue);
1060
1061 return GrDirectContext::MakeMetal(backendContext, options);
1062}
Robert Phillipsa3457b82018-03-08 11:30:12 -05001063#endif
1064
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001065#ifdef SK_DIRECT3D
Robert Phillipsf4f80112020-07-13 16:13:31 -04001066/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001067sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext) {
1068 GrContextOptions defaultOptions;
1069 return MakeDirect3D(backendContext, defaultOptions);
1070}
1071
// Primary Direct3D factory: creates the context shell, then the GrD3DGpu (which holds a
// back-pointer to the context), then runs two-phase init. Returns nullptr if
// initialization fails.
sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext,
                                                     const GrContextOptions& options) {
    sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDirect3D, options));

    direct->fGpu = GrD3DGpu::Make(backendContext, options, direct.get());
    if (!direct->init()) {
        return nullptr;
    }

    return direct;
}
1083#endif
1084
Stephen White985741a2019-07-18 11:43:45 -04001085#ifdef SK_DAWN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001086/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001087sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device) {
Stephen White985741a2019-07-18 11:43:45 -04001088 GrContextOptions defaultOptions;
1089 return MakeDawn(device, defaultOptions);
1090}
1091
Robert Phillipsf4f80112020-07-13 16:13:31 -04001092sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device,
1093 const GrContextOptions& options) {
1094 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDawn, options));
Stephen White985741a2019-07-18 11:43:45 -04001095
Robert Phillipsf4f80112020-07-13 16:13:31 -04001096 direct->fGpu = GrDawnGpu::Make(device, options, direct.get());
1097 if (!direct->init()) {
Stephen White985741a2019-07-18 11:43:45 -04001098 return nullptr;
1099 }
1100
Robert Phillipsf4f80112020-07-13 16:13:31 -04001101 return direct;
Stephen White985741a2019-07-18 11:43:45 -04001102}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001103
Stephen White985741a2019-07-18 11:43:45 -04001104#endif