blob: 19722bbf65b9fdd603bfa6558be4e539b3a55b04 [file] [log] [blame]
Robert Phillipsa3457b82018-03-08 11:30:12 -05001/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Greg Daniel54bfb182018-11-20 17:12:36 -05008
Robert Phillipsb7bfbc22020-07-01 12:55:01 -04009#include "include/gpu/GrDirectContext.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050010
Adlai Holler3acc69a2020-10-13 08:20:51 -040011#include "include/core/SkTraceMemoryDump.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050012#include "include/gpu/GrContextThreadSafeProxy.h"
Adlai Holler9555f292020-10-09 09:41:14 -040013#include "src/core/SkTaskGroup.h"
14#include "src/gpu/GrClientMappedBufferManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050015#include "src/gpu/GrContextThreadSafeProxyPriv.h"
Adlai Hollera0693042020-10-14 11:23:11 -040016#include "src/gpu/GrDirectContextPriv.h"
Adlai Holler4aa4c602020-10-12 13:58:52 -040017#include "src/gpu/GrDrawingManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/gpu/GrGpu.h"
Adlai Holler9555f292020-10-09 09:41:14 -040019#include "src/gpu/GrResourceProvider.h"
20#include "src/gpu/GrShaderUtils.h"
Adlai Holler3acc69a2020-10-13 08:20:51 -040021#include "src/image/SkImage_GpuBase.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050022
Adlai Holler4aa4c602020-10-12 13:58:52 -040023#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050024#include "src/gpu/effects/GrSkSLFP.h"
25#include "src/gpu/gl/GrGLGpu.h"
26#include "src/gpu/mock/GrMockGpu.h"
Robert Phillips5edf5102020-08-10 16:30:36 -040027#include "src/gpu/ops/GrSmallPathAtlasMgr.h"
Robert Phillipse19babf2020-04-06 13:57:30 -040028#include "src/gpu/text/GrAtlasManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050029#include "src/gpu/text/GrStrikeCache.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050030#ifdef SK_METAL
Jim Van Verth351c9b52020-11-12 15:21:11 -050031#include "include/gpu/mtl/GrMtlBackendContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050032#include "src/gpu/mtl/GrMtlTrampoline.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050033#endif
34#ifdef SK_VULKAN
Mike Kleinc0bd9f92019-04-23 12:05:21 -050035#include "src/gpu/vk/GrVkGpu.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050036#endif
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -050037#ifdef SK_DIRECT3D
38#include "src/gpu/d3d/GrD3DGpu.h"
39#endif
Stephen White985741a2019-07-18 11:43:45 -040040#ifdef SK_DAWN
Mike Klein52337de2019-07-25 09:00:52 -050041#include "src/gpu/dawn/GrDawnGpu.h"
Stephen White985741a2019-07-18 11:43:45 -040042#endif
Adlai Holler6d0745b2020-10-13 13:29:00 -040043#include <memory>
Robert Phillipsa3457b82018-03-08 11:30:12 -050044
Brian Salomon24069eb2020-06-24 10:19:52 -040045#if GR_TEST_UTILS
46# include "include/utils/SkRandom.h"
47# if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
48# include <sanitizer/lsan_interface.h>
49# endif
50#endif
51
Adlai Holler9555f292020-10-09 09:41:14 -040052#define ASSERT_SINGLE_OWNER GR_ASSERT_SINGLE_OWNER(this->singleOwner())
53
Robert Phillipse7a959d2021-03-11 14:44:42 -050054GrDirectContext::DirectContextID GrDirectContext::DirectContextID::Next() {
Robert Phillipsedff4672021-03-11 09:16:25 -050055 static std::atomic<uint32_t> nextID{1};
56 uint32_t id;
57 do {
58 id = nextID.fetch_add(1, std::memory_order_relaxed);
59 } while (id == SK_InvalidUniqueID);
60 return DirectContextID(id);
61}
62
// Constructor only wires up the thread-safe proxy and assigns a unique
// DirectContextID; all fallible heavy setup is deferred to init().
GrDirectContext::GrDirectContext(GrBackendApi backend, const GrContextOptions& options)
        : INHERITED(GrContextThreadSafeProxyPriv::Make(backend, options))
        , fDirectContextID(DirectContextID::Next()) {
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050067
// Destructor: flush pending work, wait for the GPU to finish, then tear down
// caches in an order that keeps cross-object pointers valid until last use.
GrDirectContext::~GrDirectContext() {
    ASSERT_SINGLE_OWNER
    // this if-test protects against the case where the context is being destroyed
    // before having been fully created
    if (fGpu) {
        this->flushAndSubmit();
    }

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/false);

    this->destroyDrawingManager();

    // Ideally we could just let the ptr drop, but resource cache queries this ptr in releaseAll.
    if (fResourceCache) {
        fResourceCache->releaseAll();
    }
    // This has to be after GrResourceCache::releaseAll so that other threads that are holding
    // async pixel result don't try to destroy buffers off thread.
    fMappedBufferManager.reset();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050089
// Public accessor that simply re-exports the base-class thread-safe proxy.
sk_sp<GrContextThreadSafeProxy> GrDirectContext::threadSafeProxy() {
    return INHERITED::threadSafeProxy();
}
93
Adlai Hollera7a40442020-10-09 09:49:42 -040094void GrDirectContext::resetGLTextureBindings() {
95 if (this->abandoned() || this->backend() != GrBackendApi::kOpenGL) {
96 return;
97 }
98 fGpu->resetTextureBindings();
99}
100
// Marks portions of the backend 3D API context dirty so cached state is
// re-sent; `state` is a bitmask of which state categories were touched.
void GrDirectContext::resetContext(uint32_t state) {
    ASSERT_SINGLE_OWNER
    fGpu->markContextDirty(state);
}
105
// Abandons the context: after this, no backend 3D API objects are referenced.
// The teardown order below is deliberate — caches are abandoned before the
// GrGpu is disconnected so destructors never touch the API afterwards.
void GrDirectContext::abandonContext() {
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(this->caps()->mustSyncGpuDuringAbandon());

    fStrikeCache->freeAll();

    fMappedBufferManager->abandon();

    fResourceProvider->abandon();

    // abandon first so destructors don't try to free the resources in the API.
    fResourceCache->abandonAll();

    fGpu->disconnect(GrGpu::DisconnectType::kAbandon);

    // Must be after GrResourceCache::abandonAll().
    fMappedBufferManager.reset();

    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500135
Adlai Hollera7a40442020-10-09 09:49:42 -0400136bool GrDirectContext::abandoned() {
137 if (INHERITED::abandoned()) {
138 return true;
139 }
140
141 if (fGpu && fGpu->isDeviceLost()) {
142 this->abandonContext();
143 return true;
144 }
145 return false;
146}
147
Adlai Holler61a591c2020-10-12 12:38:33 -0400148bool GrDirectContext::oomed() { return fGpu ? fGpu->checkAndResetOOMed() : false; }
149
// Like abandonContext(), but first cleanly releases all GPU resources back to
// the backend 3D API instead of leaking them. Order mirrors abandonContext().
void GrDirectContext::releaseResourcesAndAbandonContext() {
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/true);

    fResourceProvider->abandon();

    // Release all resources in the backend 3D API.
    fResourceCache->releaseAll();

    // Must be after GrResourceCache::releaseAll().
    fMappedBufferManager.reset();

    fGpu->disconnect(GrGpu::DisconnectType::kCleanup);
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillips6db27c22019-05-01 10:43:56 -0400174
// Frees all GPU resources that can be recreated on demand (atlases, strike
// cache, unlocked cache entries) after flushing any pending work.
void GrDirectContext::freeGpuResources() {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->flushAndSubmit();
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();

    // TODO: the glyph cache doesn't hold any GpuResources so this call should not be needed here.
    // Some slack in the GrTextBlob's implementation requires it though. That could be fixed.
    fStrikeCache->freeAll();

    this->drawingManager()->freeGpuResources();

    fResourceCache->purgeAllUnlocked();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500196
// Second-phase initialization. Returns false if the GrGpu was never created
// or base-class init fails. Ordering matters: the thread-safe proxy must get
// the caps before INHERITED::init(), and the caches must exist before the
// atlas manager registers itself as an on-flush callback.
bool GrDirectContext::init() {
    ASSERT_SINGLE_OWNER
    if (!fGpu) {
        return false;
    }

    fThreadSafeProxy->priv().init(fGpu->refCaps(), fGpu->refPipelineBuilder());
    if (!INHERITED::init()) {
        return false;
    }

    SkASSERT(this->getTextBlobCache());
    SkASSERT(this->threadSafeCache());

    fStrikeCache = std::make_unique<GrStrikeCache>();
    fResourceCache = std::make_unique<GrResourceCache>(this->singleOwner(), this->contextID());
    fResourceCache->setProxyProvider(this->proxyProvider());
    fResourceCache->setThreadSafeCache(this->threadSafeCache());
    fResourceProvider = std::make_unique<GrResourceProvider>(fGpu.get(), fResourceCache.get(),
                                                             this->singleOwner());
    fMappedBufferManager = std::make_unique<GrClientMappedBufferManager>(this->contextID());

    fDidTestPMConversions = false;

    // DDL TODO: we need to think through how the task group & persistent cache
    // get passed on to/shared between all the DDLRecorders created with this context.
    if (this->options().fExecutor) {
        fTaskGroup = std::make_unique<SkTaskGroup>(*this->options().fExecutor);
    }

    fPersistentCache = this->options().fPersistentCache;
    fShaderErrorHandler = this->options().fShaderErrorHandler;
    if (!fShaderErrorHandler) {
        fShaderErrorHandler = GrShaderUtils::DefaultShaderErrorHandler();
    }

    GrDrawOpAtlas::AllowMultitexturing allowMultitexturing;
    if (GrContextOptions::Enable::kNo == this->options().fAllowMultipleGlyphCacheTextures ||
        // multitexturing supported only if range can represent the index + texcoords fully
        !(this->caps()->shaderCaps()->floatIs32Bits() ||
          this->caps()->shaderCaps()->integerSupport())) {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kNo;
    } else {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kYes;
    }

    GrProxyProvider* proxyProvider = this->priv().proxyProvider();

    fAtlasManager = std::make_unique<GrAtlasManager>(proxyProvider,
                                                     this->options().fGlyphCacheTextureMaximumBytes,
                                                     allowMultitexturing);
    this->priv().addOnFlushCallbackObject(fAtlasManager.get());

    return true;
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500252
Adlai Holler3a508e92020-10-12 13:58:01 -0400253void GrDirectContext::getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const {
254 ASSERT_SINGLE_OWNER
255
256 if (resourceCount) {
257 *resourceCount = fResourceCache->getBudgetedResourceCount();
258 }
259 if (resourceBytes) {
260 *resourceBytes = fResourceCache->getBudgetedResourceBytes();
261 }
262}
263
// Returns the number of bytes held by resources that could be purged now.
size_t GrDirectContext::getResourceCachePurgeableBytes() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getPurgeableBytes();
}
268
269void GrDirectContext::getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const {
270 ASSERT_SINGLE_OWNER
271 if (maxResources) {
272 *maxResources = -1;
273 }
274 if (maxResourceBytes) {
275 *maxResourceBytes = this->getResourceCacheLimit();
276 }
277}
278
// Returns the resource cache's byte budget.
size_t GrDirectContext::getResourceCacheLimit() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getMaxResourceBytes();
}
283
// Legacy setter kept for API compatibility; the resource-count limit
// (`unused`) is ignored and only the byte budget is applied.
void GrDirectContext::setResourceCacheLimits(int unused, size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    this->setResourceCacheLimit(maxResourceBytes);
}
288
// Sets the resource cache's byte budget.
void GrDirectContext::setResourceCacheLimit(size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    fResourceCache->setLimit(maxResourceBytes);
}
293
// Purges unlocked cached resources (optionally only scratch ones) and stale
// text blobs; a no-op on an abandoned context.
void GrDirectContext::purgeUnlockedResources(bool scratchResourcesOnly) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(scratchResourcesOnly);
    fResourceCache->purgeAsNeeded();

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
308
// Time-based cleanup: drains finished async work, then purges resources (and
// CCPR cache entries / text blobs) untouched for at least `msNotUsed`.
void GrDirectContext::performDeferredCleanup(std::chrono::milliseconds msNotUsed) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->checkAsyncWorkCompletion();
    fMappedBufferManager->process();
    auto purgeTime = GrStdSteadyClock::now() - msNotUsed;

    fResourceCache->purgeAsNeeded();
    fResourceCache->purgeResourcesNotUsedSince(purgeTime);

    if (auto ccpr = this->drawingManager()->getCoverageCountingPathRenderer()) {
        ccpr->purgeCacheEntriesOlderThan(this->proxyProvider(), purgeTime);
    }

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
333
334void GrDirectContext::purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources) {
335 ASSERT_SINGLE_OWNER
336
337 if (this->abandoned()) {
338 return;
339 }
340
341 fResourceCache->purgeUnlockedResources(bytesToPurge, preferScratchResources);
342}
343
Adlai Holler3acc69a2020-10-13 08:20:51 -0400344////////////////////////////////////////////////////////////////////////////////
345bool GrDirectContext::wait(int numSemaphores, const GrBackendSemaphore waitSemaphores[],
346 bool deleteSemaphoresAfterWait) {
347 if (!fGpu || fGpu->caps()->semaphoreSupport()) {
348 return false;
349 }
350 GrWrapOwnership ownership =
351 deleteSemaphoresAfterWait ? kAdopt_GrWrapOwnership : kBorrow_GrWrapOwnership;
352 for (int i = 0; i < numSemaphores; ++i) {
353 std::unique_ptr<GrSemaphore> sema = fResourceProvider->wrapBackendSemaphore(
354 waitSemaphores[i], GrResourceProvider::SemaphoreWrapType::kWillWait, ownership);
355 // If we failed to wrap the semaphore it means the client didn't give us a valid semaphore
356 // to begin with. Therefore, it is fine to not wait on it.
357 if (sema) {
358 fGpu->waitSemaphore(sema.get());
359 }
360 }
361 return true;
362}
Adlai Holler4aa4c602020-10-12 13:58:52 -0400363
// Lazily creates the small-path atlas manager (registering it as an on-flush
// callback on first use); returns null if its atlas cannot be initialized.
GrSmallPathAtlasMgr* GrDirectContext::onGetSmallPathAtlasMgr() {
    if (!fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr = std::make_unique<GrSmallPathAtlasMgr>();

        this->priv().addOnFlushCallbackObject(fSmallPathAtlasMgr.get());
    }

    if (!fSmallPathAtlasMgr->initAtlas(this->proxyProvider(), this->caps())) {
        return nullptr;
    }

    return fSmallPathAtlasMgr.get();
}
377
Adlai Holler3acc69a2020-10-13 08:20:51 -0400378////////////////////////////////////////////////////////////////////////////////
379
// Flushes all work to the GPU. On an abandoned context the finished/submitted
// procs are still invoked (submitted=false) so clients can clean up.
GrSemaphoresSubmitted GrDirectContext::flush(const GrFlushInfo& info) {
    ASSERT_SINGLE_OWNER
    if (this->abandoned()) {
        if (info.fFinishedProc) {
            info.fFinishedProc(info.fFinishedContext);
        }
        if (info.fSubmittedProc) {
            info.fSubmittedProc(info.fSubmittedContext, false);
        }
        return GrSemaphoresSubmitted::kNo;
    }

    return this->drawingManager()->flushSurfaces({}, SkSurface::BackendSurfaceAccess::kNoAccess,
                                                 info, nullptr);
}
395
396bool GrDirectContext::submit(bool syncCpu) {
397 ASSERT_SINGLE_OWNER
398 if (this->abandoned()) {
399 return false;
400 }
401
402 if (!fGpu) {
403 return false;
404 }
405
406 return fGpu->submitToGpu(syncCpu);
407}
408
409////////////////////////////////////////////////////////////////////////////////
410
// Polls the backend for finished GPU work so pending finish-procs can fire.
void GrDirectContext::checkAsyncWorkCompletion() {
    if (fGpu) {
        fGpu->checkFinishProcs();
    }
}
416
// Blocks until all outstanding GPU work completes, then drains finish-procs.
// Runs on an abandoned context only when the caller explicitly opts in.
void GrDirectContext::syncAllOutstandingGpuWork(bool shouldExecuteWhileAbandoned) {
    if (fGpu && (!this->abandoned() || shouldExecuteWhileAbandoned)) {
        fGpu->finishOutstandingGpuWork();
        this->checkAsyncWorkCompletion();
    }
}
423
Adlai Holler3acc69a2020-10-13 08:20:51 -0400424////////////////////////////////////////////////////////////////////////////////
425
// Asks the backend to persist its Vulkan pipeline cache; no-op elsewhere.
void GrDirectContext::storeVkPipelineCacheData() {
    if (fGpu) {
        fGpu->storeVkPipelineCacheData();
    }
}
431
432////////////////////////////////////////////////////////////////////////////////
433
// True when the shader caps allow distance-field text rendering.
bool GrDirectContext::supportsDistanceFieldText() const {
    return this->caps()->shaderCaps()->supportsDistanceFieldText();
}
437
438//////////////////////////////////////////////////////////////////////////////
439
// Dumps resource-cache and text-blob-cache memory statistics into the
// client-supplied trace sink.
void GrDirectContext::dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const {
    ASSERT_SINGLE_OWNER
    fResourceCache->dumpMemoryStatistics(traceMemoryDump);
    traceMemoryDump->dumpNumericValue("skia/gr_text_blob_cache", "size", "bytes",
                                      this->getTextBlobCache()->usedBytes());
}
446
// Creates an uninitialized backend texture with the given explicit format.
// Returns an invalid GrBackendTexture on an abandoned context or failure.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    return fGpu->createBackendTexture({width, height}, backendFormat, renderable,
                                      mipMapped, isProtected);
}
460
// Convenience overload: maps the SkColorType to this context's default
// backend format, then defers to the format-based overload.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    const GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);

    return this->createBackendTexture(width, height, format, mipMapped, renderable, isProtected);
}
474
475static GrBackendTexture create_and_update_backend_texture(
476 GrDirectContext* dContext,
477 SkISize dimensions,
478 const GrBackendFormat& backendFormat,
479 GrMipmapped mipMapped,
480 GrRenderable renderable,
481 GrProtected isProtected,
482 sk_sp<GrRefCntedCallback> finishedCallback,
483 const GrGpu::BackendTextureData* data) {
484 GrGpu* gpu = dContext->priv().getGpu();
485
486 GrBackendTexture beTex = gpu->createBackendTexture(dimensions, backendFormat, renderable,
487 mipMapped, isProtected);
488 if (!beTex.isValid()) {
489 return {};
490 }
491
492 if (!dContext->priv().getGpu()->updateBackendTexture(beTex,
493 std::move(finishedCallback),
494 data)) {
495 dContext->deleteBackendTexture(beTex);
496 return {};
497 }
498 return beTex;
499}
500
// Uploads `numLevels` mip levels into `backendTexture`. Levels that must be
// flipped (bottom-left origin) or re-tightened (backend lacks rowBytes
// support) are first converted into one shared temp buffer; others are
// passed through untouched. Two passes: size the buffer, then convert.
static bool update_texture_with_pixmaps(GrGpu* gpu,
                                        const SkPixmap* srcData,
                                        int numLevels,
                                        const GrBackendTexture& backendTexture,
                                        GrSurfaceOrigin textureOrigin,
                                        sk_sp<GrRefCntedCallback> finishedCallback) {
    bool flip = textureOrigin == kBottomLeft_GrSurfaceOrigin;
    bool mustBeTight = !gpu->caps()->writePixelsRowBytesSupport();

    // Pass 1: total bytes needed for levels that require conversion.
    size_t size = 0;
    for (int i = 0; i < numLevels; ++i) {
        size_t minRowBytes = srcData[i].info().minRowBytes();
        if (flip || (mustBeTight && srcData[i].rowBytes() != minRowBytes)) {
            size += minRowBytes * srcData[i].height();
        }
    }

    std::unique_ptr<char[]> tempStorage;
    if (size) {
        tempStorage.reset(new char[size]);
    }
    // Pass 2: convert into sub-slices of tempStorage; `size` becomes a
    // running offset into the buffer.
    size = 0;
    SkAutoSTArray<15, GrPixmap> tempPixmaps(numLevels);
    for (int i = 0; i < numLevels; ++i) {
        size_t minRowBytes = srcData[i].info().minRowBytes();
        if (flip || (mustBeTight && srcData[i].rowBytes() != minRowBytes)) {
            tempPixmaps[i] = {srcData[i].info(), tempStorage.get() + size, minRowBytes};
            SkAssertResult(GrConvertPixels(tempPixmaps[i], srcData[i], flip));
            size += minRowBytes*srcData[i].height();
        } else {
            tempPixmaps[i] = srcData[i];
        }
    }

    GrGpu::BackendTextureData data(tempPixmaps.get());
    return gpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
538
// Creates a backend texture (explicit format) cleared to `color`. The
// finished callback is created before the abandoned check so it is always
// released, even on early-out.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_backend_texture(this, {width, height},
                                             backendFormat, mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
559
// Creates a backend texture for an SkColorType cleared to `color`. The color
// is routed through the format's write swizzle so the stored channel order
// matches what readers expect.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
    if (!format.isValid()) {
        return {};
    }

    GrColorType grColorType = SkColorTypeToGrColorType(skColorType);
    SkColor4f swizzledColor = this->caps()->getWriteSwizzle(format, grColorType).applyTo(color);

    GrGpu::BackendTextureData data(swizzledColor);
    return create_and_update_backend_texture(this, {width, height}, format,
                                             mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
587
// Creates a backend texture initialized from a full mip chain of pixmaps.
// The provided level count must be exactly 1 (no mips) or the full chain for
// the base dimensions; the created texture is deleted if the upload fails.
GrBackendTexture GrDirectContext::createBackendTexture(const SkPixmap srcData[],
                                                       int numProvidedLevels,
                                                       GrSurfaceOrigin textureOrigin,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    if (!srcData || numProvidedLevels <= 0) {
        return {};
    }

    // Level 0 fixes the dimensions and color type for the whole chain.
    int baseWidth = srcData[0].width();
    int baseHeight = srcData[0].height();
    SkColorType colorType = srcData[0].colorType();

    GrMipmapped mipMapped = GrMipmapped::kNo;
    int numExpectedLevels = 1;
    if (numProvidedLevels > 1) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(baseWidth, baseHeight) + 1;
        mipMapped = GrMipmapped::kYes;
    }

    if (numProvidedLevels != numExpectedLevels) {
        return {};
    }

    GrBackendFormat backendFormat = this->defaultBackendFormat(colorType, renderable);
    GrBackendTexture beTex = this->createBackendTexture(srcData[0].width(),
                                                        srcData[0].height(),
                                                        backendFormat,
                                                        mipMapped,
                                                        renderable,
                                                        isProtected);
    if (!beTex.isValid()) {
        return {};
    }
    if (!update_texture_with_pixmaps(this->priv().getGpu(),
                                     srcData,
                                     numProvidedLevels,
                                     beTex,
                                     textureOrigin,
                                     std::move(finishedCallback))) {
        this->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
643
// Overwrites an existing backend texture with a solid color (no swizzle —
// the color is applied as-is to the texture's format).
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrGpu::BackendTextureData data(color);
    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
657
// Overwrites an existing backend texture with a solid color, validating that
// `skColorType` is compatible with the texture's format and applying the
// format's write swizzle first.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           SkColorType skColorType,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrBackendFormat format = backendTexture.getBackendFormat();
    GrColorType grColorType = SkColorTypeAndFormatToGrColorType(this->caps(), skColorType, format);

    if (!this->caps()->areColorTypeAndFormatCompatible(grColorType, format)) {
        return false;
    }

    GrSwizzle swizzle = this->caps()->getWriteSwizzle(format, grColorType);
    GrGpu::BackendTextureData data(swizzle.applyTo(color));

    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
681
// Re-uploads pixel data into an existing backend texture. The level count
// must match the texture's mip status exactly (1, or the full chain).
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkPixmap srcData[],
                                           int numLevels,
                                           GrSurfaceOrigin textureOrigin,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    if (!srcData || numLevels <= 0) {
        return false;
    }

    int numExpectedLevels = 1;
    if (backendTexture.hasMipmaps()) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(backendTexture.width(),
                                                        backendTexture.height()) + 1;
    }
    if (numLevels != numExpectedLevels) {
        return false;
    }
    return update_texture_with_pixmaps(fGpu.get(),
                                       srcData,
                                       numLevels,
                                       backendTexture,
                                       textureOrigin,
                                       std::move(finishedCallback));
}
713
Adlai Holler64e13832020-10-13 08:21:56 -0400714//////////////////////////////////////////////////////////////////////////////
715
716static GrBackendTexture create_and_update_compressed_backend_texture(
717 GrDirectContext* dContext,
718 SkISize dimensions,
719 const GrBackendFormat& backendFormat,
720 GrMipmapped mipMapped,
721 GrProtected isProtected,
722 sk_sp<GrRefCntedCallback> finishedCallback,
723 const GrGpu::BackendTextureData* data) {
724 GrGpu* gpu = dContext->priv().getGpu();
725
726 GrBackendTexture beTex = gpu->createCompressedBackendTexture(dimensions, backendFormat,
727 mipMapped, isProtected);
728 if (!beTex.isValid()) {
729 return {};
730 }
731
732 if (!dContext->priv().getGpu()->updateCompressedBackendTexture(
733 beTex, std::move(finishedCallback), data)) {
734 dContext->deleteBackendTexture(beTex);
735 return {};
736 }
737 return beTex;
738}
739
// Creates a compressed backend texture (explicit format) filled with a
// solid color.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const SkColor4f& color,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}
759
// Convenience overload: maps the SkImage compression type to a backend
// format, then defers to the format-based color overload.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 SkImage::CompressionType compression,
                                                                 const SkColor4f& color,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    GrBackendFormat format = this->compressedBackendFormat(compression);
    return this->createCompressedBackendTexture(width, height, format, color,
                                                mipMapped, isProtected, finishedProc,
                                                finishedContext);
}
773
// Creates a compressed backend texture (explicit format) initialized from a
// raw compressed-data blob of `dataSize` bytes.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const void* compressedData,
                                                                 size_t dataSize,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(compressedData, dataSize);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}
794
795GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
796 SkImage::CompressionType compression,
797 const void* data, size_t dataSize,
798 GrMipmapped mipMapped,
799 GrProtected isProtected,
800 GrGpuFinishedProc finishedProc,
801 GrGpuFinishedContext finishedContext) {
802 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
803 GrBackendFormat format = this->compressedBackendFormat(compression);
804 return this->createCompressedBackendTexture(width, height, format, data, dataSize, mipMapped,
805 isProtected, finishedProc, finishedContext);
806}
807
// Overwrites the contents of an existing compressed backend texture with a solid color.
// Returns false if the context is abandoned or the GrGpu-level update fails.
bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
                                                     const SkColor4f& color,
                                                     GrGpuFinishedProc finishedProc,
                                                     GrGpuFinishedContext finishedContext) {
    // Wrap the client callback before the abandoned check so finishedProc is still honored on
    // the early-out path (presumably fired when the wrapper's last ref drops — confirm against
    // GrRefCntedCallback).
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrGpu::BackendTextureData data(color);
    return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
821
// Overwrites the contents of an existing compressed backend texture with caller-supplied
// compressed pixel data. Returns false if the context is abandoned, the data pointer is
// null, or the GrGpu-level update fails.
bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
                                                     const void* compressedData,
                                                     size_t dataSize,
                                                     GrGpuFinishedProc finishedProc,
                                                     GrGpuFinishedContext finishedContext) {
    // Wrap the client callback before any early-out so finishedProc is still honored
    // (presumably fired when the wrapper's last ref drops — confirm against GrRefCntedCallback).
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    // A null payload cannot be uploaded; fail explicitly (the callback wrapper still fires).
    if (!compressedData) {
        return false;
    }

    GrGpu::BackendTextureData data(compressedData, dataSize);

    return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
841
Adlai Holler6d0745b2020-10-13 13:29:00 -0400842//////////////////////////////////////////////////////////////////////////////
843
// Transitions a backend texture to a new mutable state (e.g. backend-specific layout/queue
// info), optionally reporting the prior state through previousState. Returns false if the
// context is abandoned or the GrGpu rejects the transition.
bool GrDirectContext::setBackendTextureState(const GrBackendTexture& backendTexture,
                                             const GrBackendSurfaceMutableState& state,
                                             GrBackendSurfaceMutableState* previousState,
                                             GrGpuFinishedProc finishedProc,
                                             GrGpuFinishedContext finishedContext) {
    // Wrap the client callback before the abandoned check so finishedProc is still honored on
    // the early-out path (presumably fired when the wrapper's last ref drops — confirm against
    // GrRefCntedCallback).
    auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    return fGpu->setBackendTextureState(backendTexture, state, previousState, std::move(callback));
}
857
858
// Render-target counterpart of setBackendTextureState above: transitions a backend render
// target to a new mutable state, optionally reporting the prior state. Returns false if the
// context is abandoned or the GrGpu rejects the transition.
bool GrDirectContext::setBackendRenderTargetState(const GrBackendRenderTarget& backendRenderTarget,
                                                  const GrBackendSurfaceMutableState& state,
                                                  GrBackendSurfaceMutableState* previousState,
                                                  GrGpuFinishedProc finishedProc,
                                                  GrGpuFinishedContext finishedContext) {
    // Wrap the client callback before the abandoned check so finishedProc is still honored on
    // the early-out path (presumably fired when the wrapper's last ref drops — confirm against
    // GrRefCntedCallback).
    auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    return fGpu->setBackendRenderTargetState(backendRenderTarget, state, previousState,
                                             std::move(callback));
}
873
// Destroys a backend texture via the GrGpu. No-op for an invalid texture, and (except on
// Vulkan) for an abandoned context.
void GrDirectContext::deleteBackendTexture(GrBackendTexture backendTex) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    // For the Vulkan backend we still must destroy the backend texture when the context is
    // abandoned.
    if ((this->abandoned() && this->backend() != GrBackendApi::kVulkan) || !backendTex.isValid()) {
        return;
    }

    fGpu->deleteBackendTexture(backendTex);
}
884
885//////////////////////////////////////////////////////////////////////////////
886
// Hands a cached (key, data) shader pair to the backend GrGpu — presumably to pre-warm its
// shader/pipeline cache; returns the GrGpu's success result.
// NOTE(review): no abandoned() guard here, and fGpu is dereferenced unconditionally — confirm
// callers never invoke this on an abandoned context.
bool GrDirectContext::precompileShader(const SkData& key, const SkData& data) {
    return fGpu->precompileShader(key, data);
}
890
#ifdef SK_ENABLE_DUMP_GPU
#include "include/core/SkString.h"
#include "src/utils/SkJSONWriter.h"
// Serializes the context's debug state (backend, caps, GrGpu, context) as a pretty-printed
// JSON document and returns it as an SkString. Debug-build tooling only.
SkString GrDirectContext::dump() const {
    SkDynamicMemoryWStream stream;
    SkJSONWriter writer(&stream, SkJSONWriter::Mode::kPretty);
    writer.beginObject();

    writer.appendString("backend", GrBackendApiToStr(this->backend()));

    writer.appendName("caps");
    this->caps()->dumpJSON(&writer);

    writer.appendName("gpu");
    this->fGpu->dumpJSON(&writer);

    writer.appendName("context");
    this->dumpJSON(&writer);

    // Flush JSON to the memory stream
    writer.endObject();
    writer.flush();

    // Null terminate the JSON data in the memory stream
    stream.write8(0);

    // Allocate a string big enough to hold all the data, then copy out of the stream
    // (the SkString was sized to bytesWritten, which now includes the terminator).
    SkString result(stream.bytesWritten());
    stream.copyToAndReset(result.writable_str());
    return result;
}
#endif
923
John Rosascoa9b348f2019-11-08 13:18:15 -0800924#ifdef SK_GL
Robert Phillipsc7228c62020-07-14 12:57:39 -0400925
Robert Phillipsf4f80112020-07-13 16:13:31 -0400926/*************************************************************************************************/
927sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500928 GrContextOptions defaultOptions;
Jim Van Verth03b8ab22020-02-24 11:36:15 -0500929 return MakeGL(std::move(glInterface), defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -0500930}
931
Robert Phillipsf4f80112020-07-13 16:13:31 -0400932sk_sp<GrDirectContext> GrDirectContext::MakeGL(const GrContextOptions& options) {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400933 return MakeGL(nullptr, options);
934}
935
Robert Phillipsf4f80112020-07-13 16:13:31 -0400936sk_sp<GrDirectContext> GrDirectContext::MakeGL() {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400937 GrContextOptions defaultOptions;
938 return MakeGL(nullptr, defaultOptions);
939}
940
#if GR_TEST_UTILS
// Test-only: wraps a glGetError implementation so that roughly 1-in-300 otherwise-clean calls
// report GR_GL_OUT_OF_MEMORY, to exercise Skia's GL OOM handling.
GrGLFunction<GrGLGetErrorFn> make_get_error_with_random_oom(GrGLFunction<GrGLGetErrorFn> original) {
    // A SkRandom and a GrGLFunction<GrGLGetErrorFn> are too big to be captured by a
    // GrGLFunction<GrGLGetError> (surprise, surprise). So we make a context object and
    // capture that by pointer. However, GrGLFunction doesn't support calling a destructor
    // on the thing it captures. So we leak the context.
    struct GetErrorContext {
        SkRandom fRandom;
        GrGLFunction<GrGLGetErrorFn> fGetError;
    };

    // Deliberately leaked — see the comment above.
    auto errorContext = new GetErrorContext;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
    // Tell LeakSanitizer the leak is intentional.
    __lsan_ignore_object(errorContext);
#endif

    errorContext->fGetError = original;

    return GrGLFunction<GrGLGetErrorFn>([errorContext]() {
        // Forward to the real implementation, then occasionally synthesize an OOM when the
        // real call reported no error.
        GrGLenum error = errorContext->fGetError();
        if (error == GR_GL_NO_ERROR && (errorContext->fRandom.nextU() % 300) == 0) {
            error = GR_GL_OUT_OF_MEMORY;
        }
        return error;
    });
}
#endif
969
// Primary GL factory: constructs the context shell, creates a GrGLGpu over the supplied
// interface, and runs init(). Returns nullptr on initialization failure.
sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface,
                                               const GrContextOptions& options) {
    sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kOpenGL, options));
#if GR_TEST_UTILS
    // Testing hook: substitute a glGetError that randomly reports OOM (see
    // make_get_error_with_random_oom above) by cloning the interface with a patched function
    // table.
    // NOTE(review): this dereferences glInterface without a null check, but the public
    // MakeGL(options)/MakeGL() overloads pass nullptr — confirm fRandomGLOOM is never used
    // with a null interface.
    if (options.fRandomGLOOM) {
        auto copy = sk_make_sp<GrGLInterface>(*glInterface);
        copy->fFunctions.fGetError =
                make_get_error_with_random_oom(glInterface->fFunctions.fGetError);
#if GR_GL_CHECK_ERROR
        // Suppress logging GL errors since we'll be synthetically generating them.
        copy->suppressErrorLogging();
#endif
        glInterface = std::move(copy);
    }
#endif
    direct->fGpu = GrGLGpu::Make(std::move(glInterface), options, direct.get());
    if (!direct->init()) {
        return nullptr;
    }
    return direct;
}
John Rosascoa9b348f2019-11-08 13:18:15 -0800991#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -0500992
Robert Phillipsf4f80112020-07-13 16:13:31 -0400993/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -0400994sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions) {
995 GrContextOptions defaultOptions;
996 return MakeMock(mockOptions, defaultOptions);
997}
998
999sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions,
1000 const GrContextOptions& options) {
1001 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMock, options));
1002
1003 direct->fGpu = GrMockGpu::Make(mockOptions, options, direct.get());
1004 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001005 return nullptr;
1006 }
Chris Daltona378b452019-12-11 13:24:11 -05001007
Robert Phillipsf4f80112020-07-13 16:13:31 -04001008 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001009}
1010
Greg Danielb4d89562018-10-03 18:44:49 +00001011#ifdef SK_VULKAN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001012/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001013sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext) {
1014 GrContextOptions defaultOptions;
1015 return MakeVulkan(backendContext, defaultOptions);
1016}
1017
1018sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext,
1019 const GrContextOptions& options) {
1020 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kVulkan, options));
1021
1022 direct->fGpu = GrVkGpu::Make(backendContext, options, direct.get());
1023 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001024 return nullptr;
1025 }
1026
Robert Phillipsf4f80112020-07-13 16:13:31 -04001027 return direct;
Greg Danielb4d89562018-10-03 18:44:49 +00001028}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001029#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001030
1031#ifdef SK_METAL
Robert Phillipsf4f80112020-07-13 16:13:31 -04001032/*************************************************************************************************/
Jim Van Verth351c9b52020-11-12 15:21:11 -05001033sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001034 GrContextOptions defaultOptions;
Jim Van Verth351c9b52020-11-12 15:21:11 -05001035 return MakeMetal(backendContext, defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -05001036}
1037
Jim Van Verth351c9b52020-11-12 15:21:11 -05001038sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext,
1039 const GrContextOptions& options) {
Robert Phillipsf4f80112020-07-13 16:13:31 -04001040 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
Robert Phillipsa3457b82018-03-08 11:30:12 -05001041
Jim Van Verth351c9b52020-11-12 15:21:11 -05001042 direct->fGpu = GrMtlTrampoline::MakeGpu(backendContext, options, direct.get());
Robert Phillipsf4f80112020-07-13 16:13:31 -04001043 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001044 return nullptr;
1045 }
Timothy Liang4e85e802018-06-28 16:37:18 -04001046
Robert Phillipsf4f80112020-07-13 16:13:31 -04001047 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001048}
Jim Van Verth351c9b52020-11-12 15:21:11 -05001049
1050// deprecated
1051sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue) {
1052 GrContextOptions defaultOptions;
1053 return MakeMetal(device, queue, defaultOptions);
1054}
1055
1056// deprecated
1057// remove include/gpu/mtl/GrMtlBackendContext.h, above, when removed
1058sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue,
1059 const GrContextOptions& options) {
1060 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
1061 GrMtlBackendContext backendContext = {};
1062 backendContext.fDevice.reset(device);
1063 backendContext.fQueue.reset(queue);
1064
1065 return GrDirectContext::MakeMetal(backendContext, options);
1066}
Robert Phillipsa3457b82018-03-08 11:30:12 -05001067#endif
1068
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001069#ifdef SK_DIRECT3D
Robert Phillipsf4f80112020-07-13 16:13:31 -04001070/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001071sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext) {
1072 GrContextOptions defaultOptions;
1073 return MakeDirect3D(backendContext, defaultOptions);
1074}
1075
1076sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext,
1077 const GrContextOptions& options) {
1078 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDirect3D, options));
1079
1080 direct->fGpu = GrD3DGpu::Make(backendContext, options, direct.get());
1081 if (!direct->init()) {
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -05001082 return nullptr;
1083 }
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001084
Robert Phillipsf4f80112020-07-13 16:13:31 -04001085 return direct;
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001086}
1087#endif
1088
Stephen White985741a2019-07-18 11:43:45 -04001089#ifdef SK_DAWN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001090/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001091sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device) {
Stephen White985741a2019-07-18 11:43:45 -04001092 GrContextOptions defaultOptions;
1093 return MakeDawn(device, defaultOptions);
1094}
1095
Robert Phillipsf4f80112020-07-13 16:13:31 -04001096sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device,
1097 const GrContextOptions& options) {
1098 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDawn, options));
Stephen White985741a2019-07-18 11:43:45 -04001099
Robert Phillipsf4f80112020-07-13 16:13:31 -04001100 direct->fGpu = GrDawnGpu::Make(device, options, direct.get());
1101 if (!direct->init()) {
Stephen White985741a2019-07-18 11:43:45 -04001102 return nullptr;
1103 }
1104
Robert Phillipsf4f80112020-07-13 16:13:31 -04001105 return direct;
Stephen White985741a2019-07-18 11:43:45 -04001106}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001107
Stephen White985741a2019-07-18 11:43:45 -04001108#endif