blob: 33010275d2318f243c557ccc9b177ca86f80f68e [file] [log] [blame]
Robert Phillipsa3457b82018-03-08 11:30:12 -05001/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Greg Daniel54bfb182018-11-20 17:12:36 -05008
Robert Phillipsb7bfbc22020-07-01 12:55:01 -04009#include "include/gpu/GrDirectContext.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050010
Adlai Holler3acc69a2020-10-13 08:20:51 -040011#include "include/core/SkTraceMemoryDump.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050012#include "include/gpu/GrContextThreadSafeProxy.h"
Adlai Holler9555f292020-10-09 09:41:14 -040013#include "src/core/SkTaskGroup.h"
14#include "src/gpu/GrClientMappedBufferManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050015#include "src/gpu/GrContextThreadSafeProxyPriv.h"
Adlai Hollera0693042020-10-14 11:23:11 -040016#include "src/gpu/GrDirectContextPriv.h"
Adlai Holler4aa4c602020-10-12 13:58:52 -040017#include "src/gpu/GrDrawingManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/gpu/GrGpu.h"
Adlai Holler9555f292020-10-09 09:41:14 -040019#include "src/gpu/GrResourceProvider.h"
20#include "src/gpu/GrShaderUtils.h"
Adlai Holler3acc69a2020-10-13 08:20:51 -040021#include "src/image/SkImage_GpuBase.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050022
Adlai Holler4aa4c602020-10-12 13:58:52 -040023#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050024#include "src/gpu/effects/GrSkSLFP.h"
25#include "src/gpu/gl/GrGLGpu.h"
26#include "src/gpu/mock/GrMockGpu.h"
Robert Phillips5edf5102020-08-10 16:30:36 -040027#include "src/gpu/ops/GrSmallPathAtlasMgr.h"
Robert Phillipse19babf2020-04-06 13:57:30 -040028#include "src/gpu/text/GrAtlasManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050029#include "src/gpu/text/GrStrikeCache.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050030#ifdef SK_METAL
Jim Van Verth351c9b52020-11-12 15:21:11 -050031#include "include/gpu/mtl/GrMtlBackendContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050032#include "src/gpu/mtl/GrMtlTrampoline.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050033#endif
34#ifdef SK_VULKAN
Mike Kleinc0bd9f92019-04-23 12:05:21 -050035#include "src/gpu/vk/GrVkGpu.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050036#endif
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -050037#ifdef SK_DIRECT3D
38#include "src/gpu/d3d/GrD3DGpu.h"
39#endif
Stephen White985741a2019-07-18 11:43:45 -040040#ifdef SK_DAWN
Mike Klein52337de2019-07-25 09:00:52 -050041#include "src/gpu/dawn/GrDawnGpu.h"
Stephen White985741a2019-07-18 11:43:45 -040042#endif
Adlai Holler6d0745b2020-10-13 13:29:00 -040043#include <memory>
Robert Phillipsa3457b82018-03-08 11:30:12 -050044
Brian Salomon24069eb2020-06-24 10:19:52 -040045#if GR_TEST_UTILS
46# include "include/utils/SkRandom.h"
47# if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
48# include <sanitizer/lsan_interface.h>
49# endif
50#endif
51
Adlai Holler9555f292020-10-09 09:41:14 -040052#define ASSERT_SINGLE_OWNER GR_ASSERT_SINGLE_OWNER(this->singleOwner())
53
// Lightweight construction only: wraps the backend/options in a thread-safe proxy.
// Heavy-weight setup (GrGpu wiring, caches) happens later in init().
GrDirectContext::GrDirectContext(GrBackendApi backend, const GrContextOptions& options)
        : INHERITED(GrContextThreadSafeProxyPriv::Make(backend, options)) {
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050057
// Tears the context down in a strict order: flush pending work, wait for the GPU,
// destroy the drawing manager, then release cached resources.
GrDirectContext::~GrDirectContext() {
    ASSERT_SINGLE_OWNER
    // this if-test protects against the case where the context is being destroyed
    // before having been fully created
    if (fGpu) {
        this->flushAndSubmit();
    }

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/false);

    this->destroyDrawingManager();

    // Ideally we could just let the ptr drop, but resource cache queries this ptr in releaseAll.
    if (fResourceCache) {
        fResourceCache->releaseAll();
    }
    // This has to be after GrResourceCache::releaseAll so that other threads that are holding
    // async pixel result don't try to destroy buffers off thread.
    fMappedBufferManager.reset();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050079
// Exposes the base class's thread-safe proxy so callers can share caps/format info
// across threads without touching this (single-owner) context directly.
sk_sp<GrContextThreadSafeProxy> GrDirectContext::threadSafeProxy() {
    return INHERITED::threadSafeProxy();
}
83
Adlai Hollera7a40442020-10-09 09:49:42 -040084void GrDirectContext::resetGLTextureBindings() {
85 if (this->abandoned() || this->backend() != GrBackendApi::kOpenGL) {
86 return;
87 }
88 fGpu->resetTextureBindings();
89}
90
// Marks the given categories of cached 3D API state dirty so the GPU re-sends them
// before its next use (needed after external code touches the underlying API).
void GrDirectContext::resetContext(uint32_t state) {
    ASSERT_SINGLE_OWNER
    fGpu->markContextDirty(state);
}
95
// Abandons the context: no further GPU work will be issued and backend objects are
// orphaned (not cleaned up in the 3D API). Teardown order below is deliberate.
void GrDirectContext::abandonContext() {
    // Idempotent: a second call is a no-op.
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(this->caps()->mustSyncGpuDuringAbandon());

    fStrikeCache->freeAll();

    fMappedBufferManager->abandon();

    fResourceProvider->abandon();

    // abandon first so destructors don't try to free the resources in the API.
    fResourceCache->abandonAll();

    fGpu->disconnect(GrGpu::DisconnectType::kAbandon);

    // Must be after GrResourceCache::abandonAll().
    fMappedBufferManager.reset();

    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500125
Adlai Hollera7a40442020-10-09 09:49:42 -0400126bool GrDirectContext::abandoned() {
127 if (INHERITED::abandoned()) {
128 return true;
129 }
130
131 if (fGpu && fGpu->isDeviceLost()) {
132 this->abandonContext();
133 return true;
134 }
135 return false;
136}
137
Adlai Holler61a591c2020-10-12 12:38:33 -0400138bool GrDirectContext::oomed() { return fGpu ? fGpu->checkAndResetOOMed() : false; }
139
// Like abandonContext(), but first cleanly releases all backend resources in the
// 3D API (DisconnectType::kCleanup) instead of orphaning them.
void GrDirectContext::releaseResourcesAndAbandonContext() {
    // Idempotent: nothing to do if already abandoned.
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/true);

    fResourceProvider->abandon();

    // Release all resources in the backend 3D API.
    fResourceCache->releaseAll();

    // Must be after GrResourceCache::releaseAll().
    fMappedBufferManager.reset();

    fGpu->disconnect(GrGpu::DisconnectType::kCleanup);
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillips6db27c22019-05-01 10:43:56 -0400164
// Flushes pending work then frees all GPU resources that are not currently in use,
// without abandoning the context.
void GrDirectContext::freeGpuResources() {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    // Flush first so in-flight work doesn't pin resources we're about to purge.
    this->flushAndSubmit();
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();

    // TODO: the glyph cache doesn't hold any GpuResources so this call should not be needed here.
    // Some slack in the GrTextBlob's implementation requires it though. That could be fixed.
    fStrikeCache->freeAll();

    this->drawingManager()->freeGpuResources();

    fResourceCache->purgeAllUnlocked();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500186
// Second-phase construction: wires the GrGpu's caps into the thread-safe proxy and
// builds the caches/providers this context owns. Returns false if no GrGpu was
// created or base-class init fails; ordering of member creation matters.
bool GrDirectContext::init() {
    ASSERT_SINGLE_OWNER
    if (!fGpu) {
        return false;
    }

    // The proxy must know the caps before INHERITED::init() runs.
    fThreadSafeProxy->priv().init(fGpu->refCaps());
    if (!INHERITED::init()) {
        return false;
    }

    SkASSERT(this->getTextBlobCache());
    SkASSERT(this->threadSafeCache());

    fStrikeCache = std::make_unique<GrStrikeCache>();
    fResourceCache = std::make_unique<GrResourceCache>(this->singleOwner(), this->contextID());
    fResourceCache->setProxyProvider(this->proxyProvider());
    fResourceCache->setThreadSafeCache(this->threadSafeCache());
    fResourceProvider = std::make_unique<GrResourceProvider>(fGpu.get(), fResourceCache.get(),
                                                             this->singleOwner());
    fMappedBufferManager = std::make_unique<GrClientMappedBufferManager>(this->contextID());

    fDidTestPMConversions = false;

    // DDL TODO: we need to think through how the task group & persistent cache
    // get passed on to/shared between all the DDLRecorders created with this context.
    if (this->options().fExecutor) {
        fTaskGroup = std::make_unique<SkTaskGroup>(*this->options().fExecutor);
    }

    fPersistentCache = this->options().fPersistentCache;
    fShaderErrorHandler = this->options().fShaderErrorHandler;
    if (!fShaderErrorHandler) {
        fShaderErrorHandler = GrShaderUtils::DefaultShaderErrorHandler();
    }

    GrDrawOpAtlas::AllowMultitexturing allowMultitexturing;
    if (GrContextOptions::Enable::kNo == this->options().fAllowMultipleGlyphCacheTextures ||
        // multitexturing supported only if range can represent the index + texcoords fully
        !(this->caps()->shaderCaps()->floatIs32Bits() ||
          this->caps()->shaderCaps()->integerSupport())) {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kNo;
    } else {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kYes;
    }

    GrProxyProvider* proxyProvider = this->priv().proxyProvider();

    fAtlasManager = std::make_unique<GrAtlasManager>(proxyProvider,
                                                     this->options().fGlyphCacheTextureMaximumBytes,
                                                     allowMultitexturing);
    // The atlas manager participates in flushes via the on-flush callback mechanism.
    this->priv().addOnFlushCallbackObject(fAtlasManager.get());

    return true;
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500242
Adlai Holler3a508e92020-10-12 13:58:01 -0400243void GrDirectContext::getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const {
244 ASSERT_SINGLE_OWNER
245
246 if (resourceCount) {
247 *resourceCount = fResourceCache->getBudgetedResourceCount();
248 }
249 if (resourceBytes) {
250 *resourceBytes = fResourceCache->getBudgetedResourceBytes();
251 }
252}
253
// Bytes held by cached resources that are currently unlocked and thus purgeable.
size_t GrDirectContext::getResourceCachePurgeableBytes() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getPurgeableBytes();
}
258
259void GrDirectContext::getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const {
260 ASSERT_SINGLE_OWNER
261 if (maxResources) {
262 *maxResources = -1;
263 }
264 if (maxResourceBytes) {
265 *maxResourceBytes = this->getResourceCacheLimit();
266 }
267}
268
// Current byte budget of the GPU resource cache.
size_t GrDirectContext::getResourceCacheLimit() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getMaxResourceBytes();
}
273
// Legacy entry point: the resource-count limit ('unused') is ignored; only the
// byte budget is applied.
void GrDirectContext::setResourceCacheLimits(int unused, size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    this->setResourceCacheLimit(maxResourceBytes);
}
278
// Sets the byte budget for the GPU resource cache.
void GrDirectContext::setResourceCacheLimit(size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    fResourceCache->setLimit(maxResourceBytes);
}
283
// Purges unlocked cached resources (optionally only scratch ones), then lets the
// cache re-balance itself against its budget.
void GrDirectContext::purgeUnlockedResources(bool scratchResourcesOnly) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(scratchResourcesOnly);
    fResourceCache->purgeAsNeeded();

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
298
// Periodic housekeeping: completes finished async work, recycles mapped buffers,
// and evicts cache entries not used within 'msNotUsed'.
void GrDirectContext::performDeferredCleanup(std::chrono::milliseconds msNotUsed) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    // Run finished-procs and return completed mapped buffers before purging.
    this->checkAsyncWorkCompletion();
    fMappedBufferManager->process();
    auto purgeTime = GrStdSteadyClock::now() - msNotUsed;

    fResourceCache->purgeAsNeeded();
    fResourceCache->purgeResourcesNotUsedSince(purgeTime);

    // CCPR keeps its own path-atlas cache keyed by last-use time.
    if (auto ccpr = this->drawingManager()->getCoverageCountingPathRenderer()) {
        ccpr->purgeCacheEntriesOlderThan(this->proxyProvider(), purgeTime);
    }

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
323
324void GrDirectContext::purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources) {
325 ASSERT_SINGLE_OWNER
326
327 if (this->abandoned()) {
328 return;
329 }
330
331 fResourceCache->purgeUnlockedResources(bytesToPurge, preferScratchResources);
332}
333
Adlai Holler3acc69a2020-10-13 08:20:51 -0400334////////////////////////////////////////////////////////////////////////////////
335bool GrDirectContext::wait(int numSemaphores, const GrBackendSemaphore waitSemaphores[],
336 bool deleteSemaphoresAfterWait) {
337 if (!fGpu || fGpu->caps()->semaphoreSupport()) {
338 return false;
339 }
340 GrWrapOwnership ownership =
341 deleteSemaphoresAfterWait ? kAdopt_GrWrapOwnership : kBorrow_GrWrapOwnership;
342 for (int i = 0; i < numSemaphores; ++i) {
343 std::unique_ptr<GrSemaphore> sema = fResourceProvider->wrapBackendSemaphore(
344 waitSemaphores[i], GrResourceProvider::SemaphoreWrapType::kWillWait, ownership);
345 // If we failed to wrap the semaphore it means the client didn't give us a valid semaphore
346 // to begin with. Therefore, it is fine to not wait on it.
347 if (sema) {
348 fGpu->waitSemaphore(sema.get());
349 }
350 }
351 return true;
352}
Adlai Holler4aa4c602020-10-12 13:58:52 -0400353
// Lazily creates the small-path atlas manager, registers it for on-flush
// callbacks, and (re)initializes its atlas. Returns null if atlas init fails.
GrSmallPathAtlasMgr* GrDirectContext::onGetSmallPathAtlasMgr() {
    if (!fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr = std::make_unique<GrSmallPathAtlasMgr>();

        this->priv().addOnFlushCallbackObject(fSmallPathAtlasMgr.get());
    }

    // initAtlas is called every time; it must be cheap when already initialized.
    if (!fSmallPathAtlasMgr->initAtlas(this->proxyProvider(), this->caps())) {
        return nullptr;
    }

    return fSmallPathAtlasMgr.get();
}
367
Adlai Holler3acc69a2020-10-13 08:20:51 -0400368////////////////////////////////////////////////////////////////////////////////
369
// Flushes all work to the GPU command stream (without submitting). On an abandoned
// context the finished/submitted procs are still invoked so clients can clean up.
GrSemaphoresSubmitted GrDirectContext::flush(const GrFlushInfo& info) {
    ASSERT_SINGLE_OWNER
    if (this->abandoned()) {
        if (info.fFinishedProc) {
            info.fFinishedProc(info.fFinishedContext);
        }
        if (info.fSubmittedProc) {
            // 'false' signals the work was never submitted.
            info.fSubmittedProc(info.fSubmittedContext, false);
        }
        return GrSemaphoresSubmitted::kNo;
    }

    return this->drawingManager()->flushSurfaces({}, SkSurface::BackendSurfaceAccess::kNoAccess,
                                                 info, nullptr);
}
385
386bool GrDirectContext::submit(bool syncCpu) {
387 ASSERT_SINGLE_OWNER
388 if (this->abandoned()) {
389 return false;
390 }
391
392 if (!fGpu) {
393 return false;
394 }
395
396 return fGpu->submitToGpu(syncCpu);
397}
398
399////////////////////////////////////////////////////////////////////////////////
400
// Polls the GPU for completed work and fires any pending finished-procs.
void GrDirectContext::checkAsyncWorkCompletion() {
    if (fGpu) {
        fGpu->checkFinishProcs();
    }
}
406
// Blocks until all outstanding GPU work completes, then runs finished-procs.
// On an abandoned context this only executes when the caller explicitly opts in
// via 'shouldExecuteWhileAbandoned' (used during teardown).
void GrDirectContext::syncAllOutstandingGpuWork(bool shouldExecuteWhileAbandoned) {
    if (fGpu && (!this->abandoned() || shouldExecuteWhileAbandoned)) {
        fGpu->finishOutstandingGpuWork();
        this->checkAsyncWorkCompletion();
    }
}
413
Adlai Holler3acc69a2020-10-13 08:20:51 -0400414////////////////////////////////////////////////////////////////////////////////
415
// Asks the GPU to persist its Vulkan pipeline cache via the persistent cache
// (no-op on non-Vulkan backends or before init).
void GrDirectContext::storeVkPipelineCacheData() {
    if (fGpu) {
        fGpu->storeVkPipelineCacheData();
    }
}
421
422////////////////////////////////////////////////////////////////////////////////
423
// Whether the backend's shader caps allow distance-field text rendering.
bool GrDirectContext::supportsDistanceFieldText() const {
    return this->caps()->shaderCaps()->supportsDistanceFieldText();
}
427
428//////////////////////////////////////////////////////////////////////////////
429
// Dumps resource-cache memory details plus the text-blob cache's byte usage into
// the client-supplied trace-memory sink.
void GrDirectContext::dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const {
    ASSERT_SINGLE_OWNER
    fResourceCache->dumpMemoryStatistics(traceMemoryDump);
    traceMemoryDump->dumpNumericValue("skia/gr_text_blob_cache", "size", "bytes",
                                      this->getTextBlobCache()->usedBytes());
}
436
// Creates an uninitialized backend texture with an explicit backend format.
// Returns an invalid GrBackendTexture on an abandoned context or on failure.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    return fGpu->createBackendTexture({width, height}, backendFormat, renderable,
                                      mipMapped, isProtected);
}
450
// Convenience overload: derives the backend format from an SkColorType, then
// defers to the format-based createBackendTexture().
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    const GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);

    return this->createBackendTexture(width, height, format, mipMapped, renderable, isProtected);
}
464
// Helper: creates a backend texture and immediately uploads 'data' into it.
// On upload failure the just-created texture is deleted so nothing leaks;
// returns an invalid texture in every failure case.
static GrBackendTexture create_and_update_backend_texture(
        GrDirectContext* dContext,
        SkISize dimensions,
        const GrBackendFormat& backendFormat,
        GrMipmapped mipMapped,
        GrRenderable renderable,
        GrProtected isProtected,
        sk_sp<GrRefCntedCallback> finishedCallback,
        const GrGpu::BackendTextureData* data) {
    GrGpu* gpu = dContext->priv().getGpu();

    GrBackendTexture beTex = gpu->createBackendTexture(dimensions, backendFormat, renderable,
                                                       mipMapped, isProtected);
    if (!beTex.isValid()) {
        return {};
    }

    if (!dContext->priv().getGpu()->updateBackendTexture(beTex,
                                                         std::move(finishedCallback),
                                                         data)) {
        // Don't leak the texture if the data upload fails.
        dContext->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
490
// Helper: uploads a full mip chain of pixmaps to 'backendTexture'. Levels that
// need flipping (bottom-left origin) or tight packing (backend lacks rowBytes
// support on writes) are first converted into one shared temp buffer; all other
// levels are uploaded in place. Two passes: pass 1 sizes the buffer, pass 2
// fills it.
static bool update_texture_with_pixmaps(GrGpu* gpu,
                                        const SkPixmap* srcData,
                                        int numLevels,
                                        const GrBackendTexture& backendTexture,
                                        GrSurfaceOrigin textureOrigin,
                                        sk_sp<GrRefCntedCallback> finishedCallback) {
    bool flip = textureOrigin == kBottomLeft_GrSurfaceOrigin;
    bool mustBeTight = !gpu->caps()->writePixelsRowBytesSupport();

    // Pass 1: total bytes of temp storage needed across all levels requiring copy.
    size_t size = 0;
    for (int i = 0; i < numLevels; ++i) {
        size_t minRowBytes = srcData[i].info().minRowBytes();
        if (flip || (mustBeTight && srcData[i].rowBytes() != minRowBytes)) {
            size += minRowBytes * srcData[i].height();
        }
    }

    std::unique_ptr<char[]> tempStorage;
    if (size) {
        tempStorage.reset(new char[size]);
    }
    // Pass 2: 'size' now becomes the running offset into tempStorage.
    size = 0;
    SkAutoSTArray<15, GrPixmap> tempPixmaps(numLevels);
    for (int i = 0; i < numLevels; ++i) {
        size_t minRowBytes = srcData[i].info().minRowBytes();
        if (flip || (mustBeTight && srcData[i].rowBytes() != minRowBytes)) {
            // Convert (and flip if needed) into the shared buffer, tightly packed.
            tempPixmaps[i] = {srcData[i].info(), tempStorage.get() + size, minRowBytes};
            SkAssertResult(GrConvertPixels(tempPixmaps[i], srcData[i], flip));
            size += minRowBytes*srcData[i].height();
        } else {
            // Level is usable as-is; upload directly from the caller's pixels.
            tempPixmaps[i] = srcData[i];
        }
    }

    GrGpu::BackendTextureData data(tempPixmaps.get());
    return gpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
528
// Creates a backend texture (explicit format) cleared to 'color'. The finished
// callback is wrapped before the abandoned check so it is always eventually
// invoked, even on early-out.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_backend_texture(this, {width, height},
                                             backendFormat, mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
549
// Creates a backend texture (format derived from SkColorType) cleared to 'color'.
// The clear color is pre-swizzled to match the format's write swizzle.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
    if (!format.isValid()) {
        return {};
    }

    GrColorType grColorType = SkColorTypeToGrColorType(skColorType);
    SkColor4f swizzledColor = this->caps()->getWriteSwizzle(format, grColorType).applyTo(color);

    GrGpu::BackendTextureData data(swizzledColor);
    return create_and_update_backend_texture(this, {width, height}, format,
                                             mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
577
// Creates a backend texture initialized from a pixmap mip chain. More than one
// level implies a full mip chain: the level count must exactly match what the
// base dimensions require. Deletes the texture if the pixel upload fails.
GrBackendTexture GrDirectContext::createBackendTexture(const SkPixmap srcData[],
                                                       int numProvidedLevels,
                                                       GrSurfaceOrigin textureOrigin,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    if (!srcData || numProvidedLevels <= 0) {
        return {};
    }

    // Level 0 defines the texture's dimensions and color type.
    int baseWidth = srcData[0].width();
    int baseHeight = srcData[0].height();
    SkColorType colorType = srcData[0].colorType();

    GrMipmapped mipMapped = GrMipmapped::kNo;
    int numExpectedLevels = 1;
    if (numProvidedLevels > 1) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(baseWidth, baseHeight) + 1;
        mipMapped = GrMipmapped::kYes;
    }

    // Partial mip chains are rejected.
    if (numProvidedLevels != numExpectedLevels) {
        return {};
    }

    GrBackendFormat backendFormat = this->defaultBackendFormat(colorType, renderable);
    GrBackendTexture beTex = this->createBackendTexture(srcData[0].width(),
                                                        srcData[0].height(),
                                                        backendFormat,
                                                        mipMapped,
                                                        renderable,
                                                        isProtected);
    if (!beTex.isValid()) {
        return {};
    }
    if (!update_texture_with_pixmaps(this->priv().getGpu(),
                                     srcData,
                                     numProvidedLevels,
                                     beTex,
                                     textureOrigin,
                                     std::move(finishedCallback))) {
        // Don't leak the texture when the upload fails.
        this->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
633
// Overwrites an existing backend texture with a solid color (no swizzle applied;
// the color is used as-is).
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrGpu::BackendTextureData data(color);
    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
647
// Overwrites an existing backend texture with a solid color, interpreting the
// texture through 'skColorType'; validates compatibility and applies the
// format's write swizzle to the color first.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           SkColorType skColorType,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrBackendFormat format = backendTexture.getBackendFormat();
    GrColorType grColorType = SkColorTypeAndFormatToGrColorType(this->caps(), skColorType, format);

    if (!this->caps()->areColorTypeAndFormatCompatible(grColorType, format)) {
        return false;
    }

    GrSwizzle swizzle = this->caps()->getWriteSwizzle(format, grColorType);
    GrGpu::BackendTextureData data(swizzle.applyTo(color));

    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
671
// Overwrites an existing backend texture from a pixmap mip chain. A mipmapped
// texture requires the exact full level count; a non-mipmapped one exactly one.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkPixmap srcData[],
                                           int numLevels,
                                           GrSurfaceOrigin textureOrigin,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    if (!srcData || numLevels <= 0) {
        return false;
    }

    int numExpectedLevels = 1;
    if (backendTexture.hasMipmaps()) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(backendTexture.width(),
                                                        backendTexture.height()) + 1;
    }
    if (numLevels != numExpectedLevels) {
        return false;
    }
    return update_texture_with_pixmaps(fGpu.get(),
                                       srcData,
                                       numLevels,
                                       backendTexture,
                                       textureOrigin,
                                       std::move(finishedCallback));
}
703
Adlai Holler64e13832020-10-13 08:21:56 -0400704//////////////////////////////////////////////////////////////////////////////
705
// Helper: compressed-format analogue of create_and_update_backend_texture().
// Creates the compressed texture, uploads 'data', and deletes the texture on
// upload failure so nothing leaks.
static GrBackendTexture create_and_update_compressed_backend_texture(
        GrDirectContext* dContext,
        SkISize dimensions,
        const GrBackendFormat& backendFormat,
        GrMipmapped mipMapped,
        GrProtected isProtected,
        sk_sp<GrRefCntedCallback> finishedCallback,
        const GrGpu::BackendTextureData* data) {
    GrGpu* gpu = dContext->priv().getGpu();

    GrBackendTexture beTex = gpu->createCompressedBackendTexture(dimensions, backendFormat,
                                                                 mipMapped, isProtected);
    if (!beTex.isValid()) {
        return {};
    }

    if (!dContext->priv().getGpu()->updateCompressedBackendTexture(
                beTex, std::move(finishedCallback), data)) {
        dContext->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
729
// Creates a compressed backend texture (explicit format) filled with 'color'.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const SkColor4f& color,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}
749
// Convenience overload: maps the SkImage compression type to a backend format,
// then defers to the format-based color overload.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 SkImage::CompressionType compression,
                                                                 const SkColor4f& color,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    GrBackendFormat format = this->compressedBackendFormat(compression);
    return this->createCompressedBackendTexture(width, height, format, color,
                                                mipMapped, isProtected, finishedProc,
                                                finishedContext);
}
763
// Creates a compressed backend texture (explicit format) initialized from raw
// pre-compressed data of 'dataSize' bytes.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const void* compressedData,
                                                                 size_t dataSize,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(compressedData, dataSize);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}
784
// Convenience overload: maps the SkImage compression type to a backend format,
// then defers to the format-based raw-data overload.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 SkImage::CompressionType compression,
                                                                 const void* data, size_t dataSize,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    GrBackendFormat format = this->compressedBackendFormat(compression);
    return this->createCompressedBackendTexture(width, height, format, data, dataSize, mipMapped,
                                                isProtected, finishedProc, finishedContext);
}
797
798bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
799 const SkColor4f& color,
800 GrGpuFinishedProc finishedProc,
801 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500802 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400803
804 if (this->abandoned()) {
805 return false;
806 }
807
808 GrGpu::BackendTextureData data(color);
809 return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
810}
811
812bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
813 const void* compressedData,
814 size_t dataSize,
815 GrGpuFinishedProc finishedProc,
816 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500817 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400818
819 if (this->abandoned()) {
820 return false;
821 }
822
823 if (!compressedData) {
824 return false;
825 }
826
827 GrGpu::BackendTextureData data(compressedData, dataSize);
828
829 return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
830}
831
Adlai Holler6d0745b2020-10-13 13:29:00 -0400832//////////////////////////////////////////////////////////////////////////////
833
834bool GrDirectContext::setBackendTextureState(const GrBackendTexture& backendTexture,
835 const GrBackendSurfaceMutableState& state,
836 GrBackendSurfaceMutableState* previousState,
837 GrGpuFinishedProc finishedProc,
838 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500839 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400840
841 if (this->abandoned()) {
842 return false;
843 }
844
845 return fGpu->setBackendTextureState(backendTexture, state, previousState, std::move(callback));
846}
847
848
849bool GrDirectContext::setBackendRenderTargetState(const GrBackendRenderTarget& backendRenderTarget,
850 const GrBackendSurfaceMutableState& state,
851 GrBackendSurfaceMutableState* previousState,
852 GrGpuFinishedProc finishedProc,
853 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500854 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400855
856 if (this->abandoned()) {
857 return false;
858 }
859
860 return fGpu->setBackendRenderTargetState(backendRenderTarget, state, previousState,
861 std::move(callback));
862}
863
864void GrDirectContext::deleteBackendTexture(GrBackendTexture backendTex) {
865 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
866 // For the Vulkan backend we still must destroy the backend texture when the context is
867 // abandoned.
868 if ((this->abandoned() && this->backend() != GrBackendApi::kVulkan) || !backendTex.isValid()) {
869 return;
870 }
871
872 fGpu->deleteBackendTexture(backendTex);
873}
874
875//////////////////////////////////////////////////////////////////////////////
876
877bool GrDirectContext::precompileShader(const SkData& key, const SkData& data) {
878 return fGpu->precompileShader(key, data);
879}
880
881#ifdef SK_ENABLE_DUMP_GPU
882#include "include/core/SkString.h"
883#include "src/utils/SkJSONWriter.h"
884SkString GrDirectContext::dump() const {
885 SkDynamicMemoryWStream stream;
886 SkJSONWriter writer(&stream, SkJSONWriter::Mode::kPretty);
887 writer.beginObject();
888
889 writer.appendString("backend", GrBackendApiToStr(this->backend()));
890
891 writer.appendName("caps");
892 this->caps()->dumpJSON(&writer);
893
894 writer.appendName("gpu");
895 this->fGpu->dumpJSON(&writer);
896
897 writer.appendName("context");
898 this->dumpJSON(&writer);
899
900 // Flush JSON to the memory stream
901 writer.endObject();
902 writer.flush();
903
904 // Null terminate the JSON data in the memory stream
905 stream.write8(0);
906
907 // Allocate a string big enough to hold all the data, then copy out of the stream
908 SkString result(stream.bytesWritten());
909 stream.copyToAndReset(result.writable_str());
910 return result;
911}
912#endif
913
John Rosascoa9b348f2019-11-08 13:18:15 -0800914#ifdef SK_GL
Robert Phillipsc7228c62020-07-14 12:57:39 -0400915
Robert Phillipsf4f80112020-07-13 16:13:31 -0400916/*************************************************************************************************/
917sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500918 GrContextOptions defaultOptions;
Jim Van Verth03b8ab22020-02-24 11:36:15 -0500919 return MakeGL(std::move(glInterface), defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -0500920}
921
Robert Phillipsf4f80112020-07-13 16:13:31 -0400922sk_sp<GrDirectContext> GrDirectContext::MakeGL(const GrContextOptions& options) {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400923 return MakeGL(nullptr, options);
924}
925
Robert Phillipsf4f80112020-07-13 16:13:31 -0400926sk_sp<GrDirectContext> GrDirectContext::MakeGL() {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400927 GrContextOptions defaultOptions;
928 return MakeGL(nullptr, defaultOptions);
929}
930
Brian Salomon24069eb2020-06-24 10:19:52 -0400931#if GR_TEST_UTILS
932GrGLFunction<GrGLGetErrorFn> make_get_error_with_random_oom(GrGLFunction<GrGLGetErrorFn> original) {
933 // A SkRandom and a GrGLFunction<GrGLGetErrorFn> are too big to be captured by a
934 // GrGLFunction<GrGLGetError> (surprise, surprise). So we make a context object and
935 // capture that by pointer. However, GrGLFunction doesn't support calling a destructor
936 // on the thing it captures. So we leak the context.
937 struct GetErrorContext {
938 SkRandom fRandom;
939 GrGLFunction<GrGLGetErrorFn> fGetError;
940 };
941
942 auto errorContext = new GetErrorContext;
943
944#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
945 __lsan_ignore_object(errorContext);
946#endif
947
948 errorContext->fGetError = original;
949
950 return GrGLFunction<GrGLGetErrorFn>([errorContext]() {
951 GrGLenum error = errorContext->fGetError();
952 if (error == GR_GL_NO_ERROR && (errorContext->fRandom.nextU() % 300) == 0) {
953 error = GR_GL_OUT_OF_MEMORY;
954 }
955 return error;
956 });
957}
958#endif
959
Robert Phillipsf4f80112020-07-13 16:13:31 -0400960sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface,
961 const GrContextOptions& options) {
962 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kOpenGL, options));
Brian Salomon24069eb2020-06-24 10:19:52 -0400963#if GR_TEST_UTILS
964 if (options.fRandomGLOOM) {
965 auto copy = sk_make_sp<GrGLInterface>(*glInterface);
966 copy->fFunctions.fGetError =
967 make_get_error_with_random_oom(glInterface->fFunctions.fGetError);
968#if GR_GL_CHECK_ERROR
969 // Suppress logging GL errors since we'll be synthetically generating them.
970 copy->suppressErrorLogging();
971#endif
972 glInterface = std::move(copy);
973 }
974#endif
Robert Phillipsf4f80112020-07-13 16:13:31 -0400975 direct->fGpu = GrGLGpu::Make(std::move(glInterface), options, direct.get());
976 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500977 return nullptr;
978 }
Robert Phillipsf4f80112020-07-13 16:13:31 -0400979 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -0500980}
John Rosascoa9b348f2019-11-08 13:18:15 -0800981#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -0500982
Robert Phillipsf4f80112020-07-13 16:13:31 -0400983/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -0400984sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions) {
985 GrContextOptions defaultOptions;
986 return MakeMock(mockOptions, defaultOptions);
987}
988
989sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions,
990 const GrContextOptions& options) {
991 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMock, options));
992
993 direct->fGpu = GrMockGpu::Make(mockOptions, options, direct.get());
994 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500995 return nullptr;
996 }
Chris Daltona378b452019-12-11 13:24:11 -0500997
Robert Phillipsf4f80112020-07-13 16:13:31 -0400998 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -0500999}
1000
Greg Danielb4d89562018-10-03 18:44:49 +00001001#ifdef SK_VULKAN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001002/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001003sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext) {
1004 GrContextOptions defaultOptions;
1005 return MakeVulkan(backendContext, defaultOptions);
1006}
1007
1008sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext,
1009 const GrContextOptions& options) {
1010 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kVulkan, options));
1011
1012 direct->fGpu = GrVkGpu::Make(backendContext, options, direct.get());
1013 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001014 return nullptr;
1015 }
1016
Robert Phillipsf4f80112020-07-13 16:13:31 -04001017 return direct;
Greg Danielb4d89562018-10-03 18:44:49 +00001018}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001019#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001020
1021#ifdef SK_METAL
Robert Phillipsf4f80112020-07-13 16:13:31 -04001022/*************************************************************************************************/
Jim Van Verth351c9b52020-11-12 15:21:11 -05001023sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001024 GrContextOptions defaultOptions;
Jim Van Verth351c9b52020-11-12 15:21:11 -05001025 return MakeMetal(backendContext, defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -05001026}
1027
Jim Van Verth351c9b52020-11-12 15:21:11 -05001028sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext,
1029 const GrContextOptions& options) {
Robert Phillipsf4f80112020-07-13 16:13:31 -04001030 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
Robert Phillipsa3457b82018-03-08 11:30:12 -05001031
Jim Van Verth351c9b52020-11-12 15:21:11 -05001032 direct->fGpu = GrMtlTrampoline::MakeGpu(backendContext, options, direct.get());
Robert Phillipsf4f80112020-07-13 16:13:31 -04001033 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001034 return nullptr;
1035 }
Timothy Liang4e85e802018-06-28 16:37:18 -04001036
Robert Phillipsf4f80112020-07-13 16:13:31 -04001037 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001038}
Jim Van Verth351c9b52020-11-12 15:21:11 -05001039
1040// deprecated
1041sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue) {
1042 GrContextOptions defaultOptions;
1043 return MakeMetal(device, queue, defaultOptions);
1044}
1045
1046// deprecated
1047// remove include/gpu/mtl/GrMtlBackendContext.h, above, when removed
1048sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue,
1049 const GrContextOptions& options) {
1050 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
1051 GrMtlBackendContext backendContext = {};
1052 backendContext.fDevice.reset(device);
1053 backendContext.fQueue.reset(queue);
1054
1055 return GrDirectContext::MakeMetal(backendContext, options);
1056}
Robert Phillipsa3457b82018-03-08 11:30:12 -05001057#endif
1058
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001059#ifdef SK_DIRECT3D
Robert Phillipsf4f80112020-07-13 16:13:31 -04001060/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001061sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext) {
1062 GrContextOptions defaultOptions;
1063 return MakeDirect3D(backendContext, defaultOptions);
1064}
1065
1066sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext,
1067 const GrContextOptions& options) {
1068 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDirect3D, options));
1069
1070 direct->fGpu = GrD3DGpu::Make(backendContext, options, direct.get());
1071 if (!direct->init()) {
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -05001072 return nullptr;
1073 }
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001074
Robert Phillipsf4f80112020-07-13 16:13:31 -04001075 return direct;
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001076}
1077#endif
1078
Stephen White985741a2019-07-18 11:43:45 -04001079#ifdef SK_DAWN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001080/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001081sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device) {
Stephen White985741a2019-07-18 11:43:45 -04001082 GrContextOptions defaultOptions;
1083 return MakeDawn(device, defaultOptions);
1084}
1085
Robert Phillipsf4f80112020-07-13 16:13:31 -04001086sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device,
1087 const GrContextOptions& options) {
1088 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDawn, options));
Stephen White985741a2019-07-18 11:43:45 -04001089
Robert Phillipsf4f80112020-07-13 16:13:31 -04001090 direct->fGpu = GrDawnGpu::Make(device, options, direct.get());
1091 if (!direct->init()) {
Stephen White985741a2019-07-18 11:43:45 -04001092 return nullptr;
1093 }
1094
Robert Phillipsf4f80112020-07-13 16:13:31 -04001095 return direct;
Stephen White985741a2019-07-18 11:43:45 -04001096}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001097
Stephen White985741a2019-07-18 11:43:45 -04001098#endif