blob: cdad13126ec9c6ba662c6a5e4224c3034d3eed8c [file] [log] [blame]
Robert Phillipsa3457b82018-03-08 11:30:12 -05001/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Greg Daniel54bfb182018-11-20 17:12:36 -05008
Robert Phillipsb7bfbc22020-07-01 12:55:01 -04009#include "include/gpu/GrDirectContext.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050010
Adlai Holler3acc69a2020-10-13 08:20:51 -040011#include "include/core/SkTraceMemoryDump.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050012#include "include/gpu/GrContextThreadSafeProxy.h"
Adlai Holler9555f292020-10-09 09:41:14 -040013#include "src/core/SkTaskGroup.h"
14#include "src/gpu/GrClientMappedBufferManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050015#include "src/gpu/GrContextThreadSafeProxyPriv.h"
Adlai Hollera0693042020-10-14 11:23:11 -040016#include "src/gpu/GrDirectContextPriv.h"
Adlai Holler4aa4c602020-10-12 13:58:52 -040017#include "src/gpu/GrDrawingManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/gpu/GrGpu.h"
Adlai Holler9555f292020-10-09 09:41:14 -040019#include "src/gpu/GrResourceProvider.h"
20#include "src/gpu/GrShaderUtils.h"
Adlai Holler3acc69a2020-10-13 08:20:51 -040021#include "src/image/SkImage_GpuBase.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050022
Adlai Holler4aa4c602020-10-12 13:58:52 -040023#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050024#include "src/gpu/effects/GrSkSLFP.h"
25#include "src/gpu/gl/GrGLGpu.h"
26#include "src/gpu/mock/GrMockGpu.h"
Robert Phillips5edf5102020-08-10 16:30:36 -040027#include "src/gpu/ops/GrSmallPathAtlasMgr.h"
Robert Phillipse19babf2020-04-06 13:57:30 -040028#include "src/gpu/text/GrAtlasManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050029#include "src/gpu/text/GrStrikeCache.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050030#ifdef SK_METAL
Jim Van Verth351c9b52020-11-12 15:21:11 -050031#include "include/gpu/mtl/GrMtlBackendContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050032#include "src/gpu/mtl/GrMtlTrampoline.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050033#endif
34#ifdef SK_VULKAN
Mike Kleinc0bd9f92019-04-23 12:05:21 -050035#include "src/gpu/vk/GrVkGpu.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050036#endif
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -050037#ifdef SK_DIRECT3D
38#include "src/gpu/d3d/GrD3DGpu.h"
39#endif
Stephen White985741a2019-07-18 11:43:45 -040040#ifdef SK_DAWN
Mike Klein52337de2019-07-25 09:00:52 -050041#include "src/gpu/dawn/GrDawnGpu.h"
Stephen White985741a2019-07-18 11:43:45 -040042#endif
Adlai Holler6d0745b2020-10-13 13:29:00 -040043#include <memory>
Robert Phillipsa3457b82018-03-08 11:30:12 -050044
Brian Salomon24069eb2020-06-24 10:19:52 -040045#if GR_TEST_UTILS
46# include "include/utils/SkRandom.h"
47# if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
48# include <sanitizer/lsan_interface.h>
49# endif
50#endif
51
Adlai Holler9555f292020-10-09 09:41:14 -040052#define ASSERT_SINGLE_OWNER GR_ASSERT_SINGLE_OWNER(this->singleOwner())
53
// Constructs a direct context for the given backend. The heavy lifting (caps, caches,
// atlas managers) is deferred to init(); the constructor only builds the thread-safe proxy.
GrDirectContext::GrDirectContext(GrBackendApi backend, const GrContextOptions& options)
        : INHERITED(GrContextThreadSafeProxyPriv::Make(backend, options)) {
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050057
// Tears the context down. NOTE(review): the release order below is deliberate —
// flush/submit, sync the GPU, destroy the drawing manager, release cached resources,
// and only then drop the mapped-buffer manager.
GrDirectContext::~GrDirectContext() {
    ASSERT_SINGLE_OWNER
    // this if-test protects against the case where the context is being destroyed
    // before having been fully created
    if (fGpu) {
        this->flushAndSubmit();
    }

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/false);

    this->destroyDrawingManager();

    // Ideally we could just let the ptr drop, but resource cache queries this ptr in releaseAll.
    if (fResourceCache) {
        fResourceCache->releaseAll();
    }
    // This has to be after GrResourceCache::releaseAll so that other threads that are holding
    // async pixel result don't try to destroy buffers off thread.
    fMappedBufferManager.reset();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050079
Adlai Holler61a591c2020-10-12 12:38:33 -040080sk_sp<GrContextThreadSafeProxy> GrDirectContext::threadSafeProxy() {
81 return INHERITED::threadSafeProxy();
82}
83
Adlai Hollera7a40442020-10-09 09:49:42 -040084void GrDirectContext::resetGLTextureBindings() {
85 if (this->abandoned() || this->backend() != GrBackendApi::kOpenGL) {
86 return;
87 }
88 fGpu->resetTextureBindings();
89}
90
// Notifies the context that the client has modified 3D API state outside of Skia;
// forwards the dirty-state bits to GrGpu::markContextDirty so cached state is re-sent.
void GrDirectContext::resetContext(uint32_t state) {
    ASSERT_SINGLE_OWNER
    fGpu->markContextDirty(state);
}
95
// Abandons the context: after this no further GPU work is issued and backend objects are
// NOT cleaned up in the API. Idempotent — returns immediately if already abandoned.
// NOTE(review): the ordering of the abandon calls below is deliberate; see inline comments.
void GrDirectContext::abandonContext() {
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(this->caps()->mustSyncGpuDuringAbandon());

    fStrikeCache->freeAll();

    fMappedBufferManager->abandon();

    fResourceProvider->abandon();

    // abandon first so destructors don't try to free the resources in the API.
    fResourceCache->abandonAll();

    fGpu->disconnect(GrGpu::DisconnectType::kAbandon);

    // Must be after GrResourceCache::abandonAll().
    fMappedBufferManager.reset();

    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500125
Adlai Hollera7a40442020-10-09 09:49:42 -0400126bool GrDirectContext::abandoned() {
127 if (INHERITED::abandoned()) {
128 return true;
129 }
130
131 if (fGpu && fGpu->isDeviceLost()) {
132 this->abandonContext();
133 return true;
134 }
135 return false;
136}
137
Adlai Holler61a591c2020-10-12 12:38:33 -0400138bool GrDirectContext::oomed() { return fGpu ? fGpu->checkAndResetOOMed() : false; }
139
// Like abandonContext(), but releases all resources in the backend 3D API before
// disconnecting (DisconnectType::kCleanup rather than kAbandon). Idempotent.
void GrDirectContext::releaseResourcesAndAbandonContext() {
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/true);

    fResourceProvider->abandon();

    // Release all resources in the backend 3D API.
    fResourceCache->releaseAll();

    // Must be after GrResourceCache::releaseAll().
    fMappedBufferManager.reset();

    fGpu->disconnect(GrGpu::DisconnectType::kCleanup);
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillips6db27c22019-05-01 10:43:56 -0400164
// Frees GPU-backed resources that are not currently in use: flushes pending work first,
// then empties the atlases, strike cache, drawing-manager resources, and unlocked
// entries in the resource cache. No-op on an abandoned context.
void GrDirectContext::freeGpuResources() {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->flushAndSubmit();
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();

    // TODO: the glyph cache doesn't hold any GpuResources so this call should not be needed here.
    // Some slack in the GrTextBlob's implementation requires it though. That could be fixed.
    fStrikeCache->freeAll();

    this->drawingManager()->freeGpuResources();

    fResourceCache->purgeAllUnlocked();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500186
// Second-stage construction: wires up caps, caches, providers, and the glyph atlas
// manager. Returns false if no GPU object exists or base-class init fails.
// NOTE(review): ordering matters — the thread-safe proxy must learn the caps before
// INHERITED::init(), and the caches must exist before the atlas manager registers
// itself as an on-flush callback.
bool GrDirectContext::init() {
    ASSERT_SINGLE_OWNER
    if (!fGpu) {
        return false;
    }

    fThreadSafeProxy->priv().init(fGpu->refCaps());
    if (!INHERITED::init()) {
        return false;
    }

    SkASSERT(this->getTextBlobCache());
    SkASSERT(this->threadSafeCache());

    fStrikeCache = std::make_unique<GrStrikeCache>();
    fResourceCache = std::make_unique<GrResourceCache>(this->singleOwner(), this->contextID());
    fResourceCache->setProxyProvider(this->proxyProvider());
    fResourceCache->setThreadSafeCache(this->threadSafeCache());
    fResourceProvider = std::make_unique<GrResourceProvider>(fGpu.get(), fResourceCache.get(),
                                                             this->singleOwner());
    fMappedBufferManager = std::make_unique<GrClientMappedBufferManager>(this->contextID());

    fDidTestPMConversions = false;

    // DDL TODO: we need to think through how the task group & persistent cache
    // get passed on to/shared between all the DDLRecorders created with this context.
    if (this->options().fExecutor) {
        fTaskGroup = std::make_unique<SkTaskGroup>(*this->options().fExecutor);
    }

    fPersistentCache = this->options().fPersistentCache;
    fShaderErrorHandler = this->options().fShaderErrorHandler;
    if (!fShaderErrorHandler) {
        fShaderErrorHandler = GrShaderUtils::DefaultShaderErrorHandler();
    }

    GrDrawOpAtlas::AllowMultitexturing allowMultitexturing;
    if (GrContextOptions::Enable::kNo == this->options().fAllowMultipleGlyphCacheTextures ||
        // multitexturing supported only if range can represent the index + texcoords fully
        !(this->caps()->shaderCaps()->floatIs32Bits() ||
          this->caps()->shaderCaps()->integerSupport())) {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kNo;
    } else {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kYes;
    }

    GrProxyProvider* proxyProvider = this->priv().proxyProvider();

    fAtlasManager = std::make_unique<GrAtlasManager>(proxyProvider,
                                                     this->options().fGlyphCacheTextureMaximumBytes,
                                                     allowMultitexturing);
    // Register the atlas manager so it can participate in every flush.
    this->priv().addOnFlushCallbackObject(fAtlasManager.get());

    return true;
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500242
Adlai Holler3a508e92020-10-12 13:58:01 -0400243void GrDirectContext::getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const {
244 ASSERT_SINGLE_OWNER
245
246 if (resourceCount) {
247 *resourceCount = fResourceCache->getBudgetedResourceCount();
248 }
249 if (resourceBytes) {
250 *resourceBytes = fResourceCache->getBudgetedResourceBytes();
251 }
252}
253
// Returns the number of bytes held by resources that could be purged right now.
size_t GrDirectContext::getResourceCachePurgeableBytes() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getPurgeableBytes();
}
258
259void GrDirectContext::getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const {
260 ASSERT_SINGLE_OWNER
261 if (maxResources) {
262 *maxResources = -1;
263 }
264 if (maxResourceBytes) {
265 *maxResourceBytes = this->getResourceCacheLimit();
266 }
267}
268
// Returns the byte budget of the GPU resource cache.
size_t GrDirectContext::getResourceCacheLimit() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getMaxResourceBytes();
}
273
// Legacy setter; the resource-count limit (first argument) is ignored and only the
// byte limit is applied.
void GrDirectContext::setResourceCacheLimits(int unused, size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    this->setResourceCacheLimit(maxResourceBytes);
}
278
// Sets the byte budget of the GPU resource cache.
void GrDirectContext::setResourceCacheLimit(size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    fResourceCache->setLimit(maxResourceBytes);
}
283
// Purges unlocked resources from the cache (optionally only scratch resources), then
// lets the cache re-balance to its budget. Stale text blobs are also evicted here.
void GrDirectContext::purgeUnlockedResources(bool scratchResourcesOnly) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(scratchResourcesOnly);
    fResourceCache->purgeAsNeeded();

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
298
// Periodic housekeeping: completes finished async work, processes mapped buffers, and
// purges any resource (or CCPR cache entry / text blob) unused for at least msNotUsed.
void GrDirectContext::performDeferredCleanup(std::chrono::milliseconds msNotUsed) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->checkAsyncWorkCompletion();
    fMappedBufferManager->process();
    auto purgeTime = GrStdSteadyClock::now() - msNotUsed;

    fResourceCache->purgeAsNeeded();
    fResourceCache->purgeResourcesNotUsedSince(purgeTime);

    if (auto ccpr = this->drawingManager()->getCoverageCountingPathRenderer()) {
        ccpr->purgeCacheEntriesOlderThan(this->proxyProvider(), purgeTime);
    }

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
323
324void GrDirectContext::purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources) {
325 ASSERT_SINGLE_OWNER
326
327 if (this->abandoned()) {
328 return;
329 }
330
331 fResourceCache->purgeUnlockedResources(bytesToPurge, preferScratchResources);
332}
333
Adlai Holler3acc69a2020-10-13 08:20:51 -0400334////////////////////////////////////////////////////////////////////////////////
335bool GrDirectContext::wait(int numSemaphores, const GrBackendSemaphore waitSemaphores[],
336 bool deleteSemaphoresAfterWait) {
337 if (!fGpu || fGpu->caps()->semaphoreSupport()) {
338 return false;
339 }
340 GrWrapOwnership ownership =
341 deleteSemaphoresAfterWait ? kAdopt_GrWrapOwnership : kBorrow_GrWrapOwnership;
342 for (int i = 0; i < numSemaphores; ++i) {
343 std::unique_ptr<GrSemaphore> sema = fResourceProvider->wrapBackendSemaphore(
344 waitSemaphores[i], GrResourceProvider::SemaphoreWrapType::kWillWait, ownership);
345 // If we failed to wrap the semaphore it means the client didn't give us a valid semaphore
346 // to begin with. Therefore, it is fine to not wait on it.
347 if (sema) {
348 fGpu->waitSemaphore(sema.get());
349 }
350 }
351 return true;
352}
Adlai Holler4aa4c602020-10-12 13:58:52 -0400353
// Lazily creates the small-path atlas manager and registers it as an on-flush callback.
// Returns nullptr when the atlas cannot be initialized for the current caps.
GrSmallPathAtlasMgr* GrDirectContext::onGetSmallPathAtlasMgr() {
    if (!fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr = std::make_unique<GrSmallPathAtlasMgr>();

        this->priv().addOnFlushCallbackObject(fSmallPathAtlasMgr.get());
    }

    // initAtlas is re-invoked on every call; it must succeed for the manager to be usable.
    if (!fSmallPathAtlasMgr->initAtlas(this->proxyProvider(), this->caps())) {
        return nullptr;
    }

    return fSmallPathAtlasMgr.get();
}
367
Adlai Holler3acc69a2020-10-13 08:20:51 -0400368////////////////////////////////////////////////////////////////////////////////
369
// Flushes pending work to the backend per the GrFlushInfo. On an abandoned context the
// client's finished proc fires immediately and the submitted proc is told success=false;
// kNo is returned.
GrSemaphoresSubmitted GrDirectContext::flush(const GrFlushInfo& info) {
    ASSERT_SINGLE_OWNER
    if (this->abandoned()) {
        if (info.fFinishedProc) {
            info.fFinishedProc(info.fFinishedContext);
        }
        if (info.fSubmittedProc) {
            info.fSubmittedProc(info.fSubmittedContext, false);
        }
        return GrSemaphoresSubmitted::kNo;
    }

    return this->drawingManager()->flushSurfaces({}, SkSurface::BackendSurfaceAccess::kNoAccess,
                                                 info, nullptr);
}
385
386bool GrDirectContext::submit(bool syncCpu) {
387 ASSERT_SINGLE_OWNER
388 if (this->abandoned()) {
389 return false;
390 }
391
392 if (!fGpu) {
393 return false;
394 }
395
396 return fGpu->submitToGpu(syncCpu);
397}
398
399////////////////////////////////////////////////////////////////////////////////
400
401void GrDirectContext::checkAsyncWorkCompletion() {
402 if (fGpu) {
403 fGpu->checkFinishProcs();
404 }
405}
406
// Blocks until all outstanding GPU work finishes, then drains finished-proc callbacks.
// Skipped on an abandoned context unless shouldExecuteWhileAbandoned is set (used by
// teardown paths that must still synchronize).
void GrDirectContext::syncAllOutstandingGpuWork(bool shouldExecuteWhileAbandoned) {
    if (fGpu && (!this->abandoned() || shouldExecuteWhileAbandoned)) {
        fGpu->finishOutstandingGpuWork();
        this->checkAsyncWorkCompletion();
    }
}
413
Adlai Holler3acc69a2020-10-13 08:20:51 -0400414////////////////////////////////////////////////////////////////////////////////
415
416void GrDirectContext::storeVkPipelineCacheData() {
417 if (fGpu) {
418 fGpu->storeVkPipelineCacheData();
419 }
420}
421
422////////////////////////////////////////////////////////////////////////////////
423
// True when the shader caps report support for distance-field text rendering.
bool GrDirectContext::supportsDistanceFieldText() const {
    return this->caps()->shaderCaps()->supportsDistanceFieldText();
}
427
428//////////////////////////////////////////////////////////////////////////////
429
// Dumps resource-cache memory statistics, plus the text-blob cache's byte usage, into
// the provided SkTraceMemoryDump.
void GrDirectContext::dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const {
    ASSERT_SINGLE_OWNER
    fResourceCache->dumpMemoryStatistics(traceMemoryDump);
    traceMemoryDump->dumpNumericValue("skia/gr_text_blob_cache", "size", "bytes",
                                      this->getTextBlobCache()->usedBytes());
}
436
// Estimates the GPU memory consumed by a texture-backed image. Returns 0 for images
// that are not texture-backed or whose proxy is unavailable.
size_t GrDirectContext::ComputeImageSize(sk_sp<SkImage> image, GrMipmapped mipMapped,
                                         bool useNextPow2) {
    if (!image->isTextureBacked()) {
        return 0;
    }
    SkImage_GpuBase* gpuImage = static_cast<SkImage_GpuBase*>(as_IB(image.get()));
    GrTextureProxy* proxy = gpuImage->peekProxy();
    if (!proxy) {
        return 0;
    }

    // MSAA is not accounted for; a single color sample per pixel is assumed.
    int colorSamplesPerPixel = 1;
    return GrSurface::ComputeSize(proxy->backendFormat(), image->dimensions(),
                                  colorSamplesPerPixel, mipMapped, useNextPow2);
}
452
// Creates an uninitialized backend texture with the given format. Returns an invalid
// GrBackendTexture if the context has been abandoned.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    return fGpu->createBackendTexture({width, height}, backendFormat, renderable,
                                      mipMapped, isProtected);
}
466
// Convenience overload: maps the SkColorType to this context's default backend format
// and delegates to the format-based overload above.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    const GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);

    return this->createBackendTexture(width, height, format, mipMapped, renderable, isProtected);
}
480
// Helper: creates a backend texture and immediately uploads |data| into it. If the
// upload fails the freshly created texture is deleted so no backend object leaks.
static GrBackendTexture create_and_update_backend_texture(
        GrDirectContext* dContext,
        SkISize dimensions,
        const GrBackendFormat& backendFormat,
        GrMipmapped mipMapped,
        GrRenderable renderable,
        GrProtected isProtected,
        sk_sp<GrRefCntedCallback> finishedCallback,
        const GrGpu::BackendTextureData* data) {
    GrGpu* gpu = dContext->priv().getGpu();

    GrBackendTexture beTex = gpu->createBackendTexture(dimensions, backendFormat, renderable,
                                                       mipMapped, isProtected);
    if (!beTex.isValid()) {
        return {};
    }

    if (!dContext->priv().getGpu()->updateBackendTexture(beTex,
                                                         std::move(finishedCallback),
                                                         data)) {
        // Upload failed: clean up the texture we just created before reporting failure.
        dContext->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
506
// Helper: uploads an array of mip-level pixmaps to a backend texture. Levels that must
// be transformed (flipped for bottom-left origin, or tightened when the backend lacks
// row-bytes support for writes) are converted into one shared temp buffer first; all
// other levels are referenced in place.
static bool update_texture_with_pixmaps(GrGpu* gpu,
                                        const SkPixmap* srcData,
                                        int numLevels,
                                        const GrBackendTexture& backendTexture,
                                        GrSurfaceOrigin textureOrigin,
                                        sk_sp<GrRefCntedCallback> finishedCallback) {
    bool flip = textureOrigin == kBottomLeft_GrSurfaceOrigin;
    bool mustBeTight = !gpu->caps()->writePixelsRowBytesSupport();

    // First pass: total the temp storage needed for levels that require conversion.
    size_t size = 0;
    for (int i = 0; i < numLevels; ++i) {
        size_t minRowBytes = srcData[i].info().minRowBytes();
        if (flip || (mustBeTight && srcData[i].rowBytes() != minRowBytes)) {
            size += minRowBytes * srcData[i].height();
        }
    }

    std::unique_ptr<char[]> tempStorage;
    if (size) {
        tempStorage.reset(new char[size]);
    }
    // Second pass: 'size' is reused as a running offset into tempStorage.
    size = 0;
    SkAutoSTArray<15, GrPixmap> tempPixmaps(numLevels);
    for (int i = 0; i < numLevels; ++i) {
        size_t minRowBytes = srcData[i].info().minRowBytes();
        if (flip || (mustBeTight && srcData[i].rowBytes() != minRowBytes)) {
            tempPixmaps[i] = {srcData[i].info(), tempStorage.get() + size, minRowBytes};
            SkAssertResult(GrConvertPixels(tempPixmaps[i], srcData[i], flip));
            size += minRowBytes*srcData[i].height();
        } else {
            tempPixmaps[i] = srcData[i];
        }
    }

    GrGpu::BackendTextureData data(tempPixmaps.get());
    return gpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
544
// Creates a backend texture with the given format, cleared to |color|. The callback is
// wrapped before the abandoned-check so the client is always notified.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_backend_texture(this, {width, height},
                                             backendFormat, mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
565
// Creates a backend texture for the SkColorType's default format, cleared to |color|.
// The clear color is run through the format's write swizzle before the upload.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
    if (!format.isValid()) {
        return {};
    }

    GrColorType grColorType = SkColorTypeToGrColorType(skColorType);
    SkColor4f swizzledColor = this->caps()->getWriteSwizzle(format, grColorType).applyTo(color);

    GrGpu::BackendTextureData data(swizzledColor);
    return create_and_update_backend_texture(this, {width, height}, format,
                                             mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
593
// Creates a backend texture initialized from the given pixmap level(s). More than one
// provided level implies a full mip chain, and the count must match exactly the number
// of levels computed from the base dimensions. On upload failure the texture is deleted.
GrBackendTexture GrDirectContext::createBackendTexture(const SkPixmap srcData[],
                                                       int numProvidedLevels,
                                                       GrSurfaceOrigin textureOrigin,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    if (!srcData || numProvidedLevels <= 0) {
        return {};
    }

    // The base level determines the dimensions and color type of the whole texture.
    int baseWidth = srcData[0].width();
    int baseHeight = srcData[0].height();
    SkColorType colorType = srcData[0].colorType();

    GrMipmapped mipMapped = GrMipmapped::kNo;
    int numExpectedLevels = 1;
    if (numProvidedLevels > 1) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(baseWidth, baseHeight) + 1;
        mipMapped = GrMipmapped::kYes;
    }

    if (numProvidedLevels != numExpectedLevels) {
        return {};
    }

    GrBackendFormat backendFormat = this->defaultBackendFormat(colorType, renderable);
    GrBackendTexture beTex = this->createBackendTexture(srcData[0].width(),
                                                        srcData[0].height(),
                                                        backendFormat,
                                                        mipMapped,
                                                        renderable,
                                                        isProtected);
    if (!beTex.isValid()) {
        return {};
    }
    if (!update_texture_with_pixmaps(this->priv().getGpu(),
                                     srcData,
                                     numProvidedLevels,
                                     beTex,
                                     textureOrigin,
                                     std::move(finishedCallback))) {
        this->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
649
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400650bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
651 const SkColor4f& color,
652 GrGpuFinishedProc finishedProc,
653 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500654 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400655
656 if (this->abandoned()) {
657 return false;
658 }
659
660 GrGpu::BackendTextureData data(color);
661 return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
662}
663
// Overwrites a backend texture with a uniform color, first validating that the given
// SkColorType is compatible with the texture's format and applying the write swizzle.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           SkColorType skColorType,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrBackendFormat format = backendTexture.getBackendFormat();
    GrColorType grColorType = SkColorTypeAndFormatToGrColorType(this->caps(), skColorType, format);

    if (!this->caps()->areColorTypeAndFormatCompatible(grColorType, format)) {
        return false;
    }

    GrSwizzle swizzle = this->caps()->getWriteSwizzle(format, grColorType);
    GrGpu::BackendTextureData data(swizzle.applyTo(color));

    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
687
// Uploads pixmap level(s) into an existing backend texture. The provided level count
// must exactly match the texture's mip structure (full chain if it has mipmaps, else 1).
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkPixmap srcData[],
                                           int numLevels,
                                           GrSurfaceOrigin textureOrigin,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    if (!srcData || numLevels <= 0) {
        return false;
    }

    int numExpectedLevels = 1;
    if (backendTexture.hasMipmaps()) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(backendTexture.width(),
                                                        backendTexture.height()) + 1;
    }
    if (numLevels != numExpectedLevels) {
        return false;
    }
    return update_texture_with_pixmaps(fGpu.get(),
                                       srcData,
                                       numLevels,
                                       backendTexture,
                                       textureOrigin,
                                       std::move(finishedCallback));
}
719
Adlai Holler64e13832020-10-13 08:21:56 -0400720//////////////////////////////////////////////////////////////////////////////
721
// Helper: creates a compressed backend texture and immediately uploads |data| into it.
// On upload failure the texture is deleted so no backend object leaks.
static GrBackendTexture create_and_update_compressed_backend_texture(
        GrDirectContext* dContext,
        SkISize dimensions,
        const GrBackendFormat& backendFormat,
        GrMipmapped mipMapped,
        GrProtected isProtected,
        sk_sp<GrRefCntedCallback> finishedCallback,
        const GrGpu::BackendTextureData* data) {
    GrGpu* gpu = dContext->priv().getGpu();

    GrBackendTexture beTex = gpu->createCompressedBackendTexture(dimensions, backendFormat,
                                                                 mipMapped, isProtected);
    if (!beTex.isValid()) {
        return {};
    }

    if (!dContext->priv().getGpu()->updateCompressedBackendTexture(
                beTex, std::move(finishedCallback), data)) {
        dContext->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
745
// Creates a compressed backend texture of the given format, filled with |color|.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const SkColor4f& color,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}
765
// Convenience overload: resolves the SkImage::CompressionType to a backend format and
// delegates to the format-based color overload.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 SkImage::CompressionType compression,
                                                                 const SkColor4f& color,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    GrBackendFormat format = this->compressedBackendFormat(compression);
    return this->createCompressedBackendTexture(width, height, format, color,
                                                mipMapped, isProtected, finishedProc,
                                                finishedContext);
}
779
// Creates a compressed backend texture of the given format, initialized with the raw
// pre-compressed payload (|compressedData|, |dataSize|).
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const void* compressedData,
                                                                 size_t dataSize,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(compressedData, dataSize);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}
800
801GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
802 SkImage::CompressionType compression,
803 const void* data, size_t dataSize,
804 GrMipmapped mipMapped,
805 GrProtected isProtected,
806 GrGpuFinishedProc finishedProc,
807 GrGpuFinishedContext finishedContext) {
808 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
809 GrBackendFormat format = this->compressedBackendFormat(compression);
810 return this->createCompressedBackendTexture(width, height, format, data, dataSize, mipMapped,
811 isProtected, finishedProc, finishedContext);
812}
813
814bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
815 const SkColor4f& color,
816 GrGpuFinishedProc finishedProc,
817 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500818 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400819
820 if (this->abandoned()) {
821 return false;
822 }
823
824 GrGpu::BackendTextureData data(color);
825 return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
826}
827
828bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
829 const void* compressedData,
830 size_t dataSize,
831 GrGpuFinishedProc finishedProc,
832 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500833 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400834
835 if (this->abandoned()) {
836 return false;
837 }
838
839 if (!compressedData) {
840 return false;
841 }
842
843 GrGpu::BackendTextureData data(compressedData, dataSize);
844
845 return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
846}
847
Adlai Holler6d0745b2020-10-13 13:29:00 -0400848//////////////////////////////////////////////////////////////////////////////
849
850bool GrDirectContext::setBackendTextureState(const GrBackendTexture& backendTexture,
851 const GrBackendSurfaceMutableState& state,
852 GrBackendSurfaceMutableState* previousState,
853 GrGpuFinishedProc finishedProc,
854 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500855 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400856
857 if (this->abandoned()) {
858 return false;
859 }
860
861 return fGpu->setBackendTextureState(backendTexture, state, previousState, std::move(callback));
862}
863
864
865bool GrDirectContext::setBackendRenderTargetState(const GrBackendRenderTarget& backendRenderTarget,
866 const GrBackendSurfaceMutableState& state,
867 GrBackendSurfaceMutableState* previousState,
868 GrGpuFinishedProc finishedProc,
869 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500870 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400871
872 if (this->abandoned()) {
873 return false;
874 }
875
876 return fGpu->setBackendRenderTargetState(backendRenderTarget, state, previousState,
877 std::move(callback));
878}
879
880void GrDirectContext::deleteBackendTexture(GrBackendTexture backendTex) {
881 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
882 // For the Vulkan backend we still must destroy the backend texture when the context is
883 // abandoned.
884 if ((this->abandoned() && this->backend() != GrBackendApi::kVulkan) || !backendTex.isValid()) {
885 return;
886 }
887
888 fGpu->deleteBackendTexture(backendTex);
889}
890
891//////////////////////////////////////////////////////////////////////////////
892
// Hands a previously cached (key, data) shader entry to the backend so the program can be
// compiled ahead of first use; returns the backend's success/failure.
// NOTE(review): unlike the other entry points in this file there is no abandoned() guard
// here, so fGpu is assumed valid — confirm callers never invoke this after abandonment.
bool GrDirectContext::precompileShader(const SkData& key, const SkData& data) {
    return fGpu->precompileShader(key, data);
}
896
897#ifdef SK_ENABLE_DUMP_GPU
898#include "include/core/SkString.h"
899#include "src/utils/SkJSONWriter.h"
900SkString GrDirectContext::dump() const {
901 SkDynamicMemoryWStream stream;
902 SkJSONWriter writer(&stream, SkJSONWriter::Mode::kPretty);
903 writer.beginObject();
904
905 writer.appendString("backend", GrBackendApiToStr(this->backend()));
906
907 writer.appendName("caps");
908 this->caps()->dumpJSON(&writer);
909
910 writer.appendName("gpu");
911 this->fGpu->dumpJSON(&writer);
912
913 writer.appendName("context");
914 this->dumpJSON(&writer);
915
916 // Flush JSON to the memory stream
917 writer.endObject();
918 writer.flush();
919
920 // Null terminate the JSON data in the memory stream
921 stream.write8(0);
922
923 // Allocate a string big enough to hold all the data, then copy out of the stream
924 SkString result(stream.bytesWritten());
925 stream.copyToAndReset(result.writable_str());
926 return result;
927}
928#endif
929
John Rosascoa9b348f2019-11-08 13:18:15 -0800930#ifdef SK_GL
Robert Phillipsc7228c62020-07-14 12:57:39 -0400931
Robert Phillipsf4f80112020-07-13 16:13:31 -0400932/*************************************************************************************************/
933sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500934 GrContextOptions defaultOptions;
Jim Van Verth03b8ab22020-02-24 11:36:15 -0500935 return MakeGL(std::move(glInterface), defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -0500936}
937
Robert Phillipsf4f80112020-07-13 16:13:31 -0400938sk_sp<GrDirectContext> GrDirectContext::MakeGL(const GrContextOptions& options) {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400939 return MakeGL(nullptr, options);
940}
941
Robert Phillipsf4f80112020-07-13 16:13:31 -0400942sk_sp<GrDirectContext> GrDirectContext::MakeGL() {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400943 GrContextOptions defaultOptions;
944 return MakeGL(nullptr, defaultOptions);
945}
946
Brian Salomon24069eb2020-06-24 10:19:52 -0400947#if GR_TEST_UTILS
// Test-only helper: wraps 'original' so that roughly 1 in 300 calls that would have
// reported GR_GL_NO_ERROR instead report GR_GL_OUT_OF_MEMORY, to exercise the driver-OOM
// handling paths.
GrGLFunction<GrGLGetErrorFn> make_get_error_with_random_oom(GrGLFunction<GrGLGetErrorFn> original) {
    // A SkRandom and a GrGLFunction<GrGLGetErrorFn> are too big to be captured by a
    // GrGLFunction<GrGLGetError> (surprise, surprise). So we make a context object and
    // capture that by pointer. However, GrGLFunction doesn't support calling a destructor
    // on the thing it captures. So we leak the context.
    struct GetErrorContext {
        SkRandom fRandom;
        GrGLFunction<GrGLGetErrorFn> fGetError;
    };

    auto errorContext = new GetErrorContext;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
    // The leak above is deliberate; keep leak-sanitizer builds quiet about it.
    __lsan_ignore_object(errorContext);
#endif

    errorContext->fGetError = original;

    return GrGLFunction<GrGLGetErrorFn>([errorContext]() {
        GrGLenum error = errorContext->fGetError();
        // Only inject an OOM when the real call succeeded, so genuine errors pass through.
        if (error == GR_GL_NO_ERROR && (errorContext->fRandom.nextU() % 300) == 0) {
            error = GR_GL_OUT_OF_MEMORY;
        }
        return error;
    });
}
974#endif
975
Robert Phillipsf4f80112020-07-13 16:13:31 -0400976sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface,
977 const GrContextOptions& options) {
978 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kOpenGL, options));
Brian Salomon24069eb2020-06-24 10:19:52 -0400979#if GR_TEST_UTILS
980 if (options.fRandomGLOOM) {
981 auto copy = sk_make_sp<GrGLInterface>(*glInterface);
982 copy->fFunctions.fGetError =
983 make_get_error_with_random_oom(glInterface->fFunctions.fGetError);
984#if GR_GL_CHECK_ERROR
985 // Suppress logging GL errors since we'll be synthetically generating them.
986 copy->suppressErrorLogging();
987#endif
988 glInterface = std::move(copy);
989 }
990#endif
Robert Phillipsf4f80112020-07-13 16:13:31 -0400991 direct->fGpu = GrGLGpu::Make(std::move(glInterface), options, direct.get());
992 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500993 return nullptr;
994 }
Robert Phillipsf4f80112020-07-13 16:13:31 -0400995 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -0500996}
John Rosascoa9b348f2019-11-08 13:18:15 -0800997#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -0500998
Robert Phillipsf4f80112020-07-13 16:13:31 -0400999/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001000sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions) {
1001 GrContextOptions defaultOptions;
1002 return MakeMock(mockOptions, defaultOptions);
1003}
1004
1005sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions,
1006 const GrContextOptions& options) {
1007 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMock, options));
1008
1009 direct->fGpu = GrMockGpu::Make(mockOptions, options, direct.get());
1010 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001011 return nullptr;
1012 }
Chris Daltona378b452019-12-11 13:24:11 -05001013
Robert Phillipsf4f80112020-07-13 16:13:31 -04001014 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001015}
1016
Greg Danielb4d89562018-10-03 18:44:49 +00001017#ifdef SK_VULKAN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001018/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001019sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext) {
1020 GrContextOptions defaultOptions;
1021 return MakeVulkan(backendContext, defaultOptions);
1022}
1023
1024sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext,
1025 const GrContextOptions& options) {
1026 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kVulkan, options));
1027
1028 direct->fGpu = GrVkGpu::Make(backendContext, options, direct.get());
1029 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001030 return nullptr;
1031 }
1032
Robert Phillipsf4f80112020-07-13 16:13:31 -04001033 return direct;
Greg Danielb4d89562018-10-03 18:44:49 +00001034}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001035#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001036
1037#ifdef SK_METAL
Robert Phillipsf4f80112020-07-13 16:13:31 -04001038/*************************************************************************************************/
Jim Van Verth351c9b52020-11-12 15:21:11 -05001039sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001040 GrContextOptions defaultOptions;
Jim Van Verth351c9b52020-11-12 15:21:11 -05001041 return MakeMetal(backendContext, defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -05001042}
1043
Jim Van Verth351c9b52020-11-12 15:21:11 -05001044sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext,
1045 const GrContextOptions& options) {
Robert Phillipsf4f80112020-07-13 16:13:31 -04001046 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
Robert Phillipsa3457b82018-03-08 11:30:12 -05001047
Jim Van Verth351c9b52020-11-12 15:21:11 -05001048 direct->fGpu = GrMtlTrampoline::MakeGpu(backendContext, options, direct.get());
Robert Phillipsf4f80112020-07-13 16:13:31 -04001049 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001050 return nullptr;
1051 }
Timothy Liang4e85e802018-06-28 16:37:18 -04001052
Robert Phillipsf4f80112020-07-13 16:13:31 -04001053 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001054}
Jim Van Verth351c9b52020-11-12 15:21:11 -05001055
1056// deprecated
1057sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue) {
1058 GrContextOptions defaultOptions;
1059 return MakeMetal(device, queue, defaultOptions);
1060}
1061
1062// deprecated
1063// remove include/gpu/mtl/GrMtlBackendContext.h, above, when removed
1064sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue,
1065 const GrContextOptions& options) {
1066 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
1067 GrMtlBackendContext backendContext = {};
1068 backendContext.fDevice.reset(device);
1069 backendContext.fQueue.reset(queue);
1070
1071 return GrDirectContext::MakeMetal(backendContext, options);
1072}
Robert Phillipsa3457b82018-03-08 11:30:12 -05001073#endif
1074
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001075#ifdef SK_DIRECT3D
Robert Phillipsf4f80112020-07-13 16:13:31 -04001076/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001077sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext) {
1078 GrContextOptions defaultOptions;
1079 return MakeDirect3D(backendContext, defaultOptions);
1080}
1081
1082sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext,
1083 const GrContextOptions& options) {
1084 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDirect3D, options));
1085
1086 direct->fGpu = GrD3DGpu::Make(backendContext, options, direct.get());
1087 if (!direct->init()) {
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -05001088 return nullptr;
1089 }
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001090
Robert Phillipsf4f80112020-07-13 16:13:31 -04001091 return direct;
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001092}
1093#endif
1094
Stephen White985741a2019-07-18 11:43:45 -04001095#ifdef SK_DAWN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001096/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001097sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device) {
Stephen White985741a2019-07-18 11:43:45 -04001098 GrContextOptions defaultOptions;
1099 return MakeDawn(device, defaultOptions);
1100}
1101
Robert Phillipsf4f80112020-07-13 16:13:31 -04001102sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device,
1103 const GrContextOptions& options) {
1104 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDawn, options));
Stephen White985741a2019-07-18 11:43:45 -04001105
Robert Phillipsf4f80112020-07-13 16:13:31 -04001106 direct->fGpu = GrDawnGpu::Make(device, options, direct.get());
1107 if (!direct->init()) {
Stephen White985741a2019-07-18 11:43:45 -04001108 return nullptr;
1109 }
1110
Robert Phillipsf4f80112020-07-13 16:13:31 -04001111 return direct;
Stephen White985741a2019-07-18 11:43:45 -04001112}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001113
Stephen White985741a2019-07-18 11:43:45 -04001114#endif