blob: ab5c6cfbf1e7c420836a7fa629ca00e4eef457d1 [file] [log] [blame]
Robert Phillipsa3457b82018-03-08 11:30:12 -05001/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Greg Daniel54bfb182018-11-20 17:12:36 -05008
Robert Phillipsb7bfbc22020-07-01 12:55:01 -04009#include "include/gpu/GrDirectContext.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050010
Adlai Holler3acc69a2020-10-13 08:20:51 -040011#include "include/core/SkTraceMemoryDump.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050012#include "include/gpu/GrContextThreadSafeProxy.h"
Adlai Holler9555f292020-10-09 09:41:14 -040013#include "src/core/SkTaskGroup.h"
14#include "src/gpu/GrClientMappedBufferManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050015#include "src/gpu/GrContextThreadSafeProxyPriv.h"
Adlai Hollera0693042020-10-14 11:23:11 -040016#include "src/gpu/GrDirectContextPriv.h"
Adlai Holler4aa4c602020-10-12 13:58:52 -040017#include "src/gpu/GrDrawingManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/gpu/GrGpu.h"
Adlai Holler9555f292020-10-09 09:41:14 -040019#include "src/gpu/GrResourceProvider.h"
20#include "src/gpu/GrShaderUtils.h"
Adlai Holler3acc69a2020-10-13 08:20:51 -040021#include "src/image/SkImage_GpuBase.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050022
Adlai Holler4aa4c602020-10-12 13:58:52 -040023#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050024#include "src/gpu/effects/GrSkSLFP.h"
25#include "src/gpu/gl/GrGLGpu.h"
26#include "src/gpu/mock/GrMockGpu.h"
Robert Phillips5edf5102020-08-10 16:30:36 -040027#include "src/gpu/ops/GrSmallPathAtlasMgr.h"
Robert Phillipse19babf2020-04-06 13:57:30 -040028#include "src/gpu/text/GrAtlasManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050029#include "src/gpu/text/GrStrikeCache.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050030#ifdef SK_METAL
Jim Van Verth351c9b52020-11-12 15:21:11 -050031#include "include/gpu/mtl/GrMtlBackendContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050032#include "src/gpu/mtl/GrMtlTrampoline.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050033#endif
34#ifdef SK_VULKAN
Mike Kleinc0bd9f92019-04-23 12:05:21 -050035#include "src/gpu/vk/GrVkGpu.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050036#endif
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -050037#ifdef SK_DIRECT3D
38#include "src/gpu/d3d/GrD3DGpu.h"
39#endif
Stephen White985741a2019-07-18 11:43:45 -040040#ifdef SK_DAWN
Mike Klein52337de2019-07-25 09:00:52 -050041#include "src/gpu/dawn/GrDawnGpu.h"
Stephen White985741a2019-07-18 11:43:45 -040042#endif
Adlai Holler6d0745b2020-10-13 13:29:00 -040043#include <memory>
Robert Phillipsa3457b82018-03-08 11:30:12 -050044
Brian Salomon24069eb2020-06-24 10:19:52 -040045#if GR_TEST_UTILS
46# include "include/utils/SkRandom.h"
47# if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
48# include <sanitizer/lsan_interface.h>
49# endif
50#endif
51
Adlai Holler9555f292020-10-09 09:41:14 -040052#define ASSERT_SINGLE_OWNER GR_ASSERT_SINGLE_OWNER(this->singleOwner())
53
// Constructs a direct context for 'backend'; real setup is deferred to init().
// The base GrContext receives a freshly made thread-safe proxy for this
// backend/options pair.
GrDirectContext::GrDirectContext(GrBackendApi backend, const GrContextOptions& options)
        : INHERITED(GrContextThreadSafeProxyPriv::Make(backend, options)) {
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050057
// Tears the context down in dependency order: flush pending work, wait for the
// GPU to drain, then destroy managers/caches that reference GPU resources.
GrDirectContext::~GrDirectContext() {
    ASSERT_SINGLE_OWNER
    // this if-test protects against the case where the context is being destroyed
    // before having been fully created
    if (fGpu) {
        this->flushAndSubmit();
    }

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/false);

    this->destroyDrawingManager();
    // Drop the mapped-buffer manager before the resource cache goes away.
    fMappedBufferManager.reset();

    // Ideally we could just let the ptr drop, but resource cache queries this ptr in releaseAll.
    if (fResourceCache) {
        fResourceCache->releaseAll();
    }
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050077
Adlai Holler61a591c2020-10-12 12:38:33 -040078sk_sp<GrContextThreadSafeProxy> GrDirectContext::threadSafeProxy() {
79 return INHERITED::threadSafeProxy();
80}
81
Adlai Hollera7a40442020-10-09 09:49:42 -040082void GrDirectContext::resetGLTextureBindings() {
83 if (this->abandoned() || this->backend() != GrBackendApi::kOpenGL) {
84 return;
85 }
86 fGpu->resetTextureBindings();
87}
88
// Marks backend 3D API state dirty so the GrGpu re-establishes it before the
// next use; 'state' is the caller's dirty mask, forwarded verbatim.
void GrDirectContext::resetContext(uint32_t state) {
    ASSERT_SINGLE_OWNER
    fGpu->markContextDirty(state);
}
93
// Disconnects this context from the 3D API without freeing backend objects.
// After the early-out for an already-abandoned context, GPU work is synced
// only when the caps demand it, caches are emptied, and the GrGpu is
// disconnected with kAbandon so destructors won't touch API resources.
void GrDirectContext::abandonContext() {
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(this->caps()->mustSyncGpuDuringAbandon());

    fStrikeCache->freeAll();

    fMappedBufferManager->abandon();

    fResourceProvider->abandon();

    // abandon first so destructors don't try to free the resources in the API.
    fResourceCache->abandonAll();

    fGpu->disconnect(GrGpu::DisconnectType::kAbandon);

    fMappedBufferManager.reset();
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500121
Adlai Hollera7a40442020-10-09 09:49:42 -0400122bool GrDirectContext::abandoned() {
123 if (INHERITED::abandoned()) {
124 return true;
125 }
126
127 if (fGpu && fGpu->isDeviceLost()) {
128 this->abandonContext();
129 return true;
130 }
131 return false;
132}
133
Adlai Holler61a591c2020-10-12 12:38:33 -0400134bool GrDirectContext::oomed() { return fGpu ? fGpu->checkAndResetOOMed() : false; }
135
// Like abandonContext(), but actually frees backend objects first: the cache
// releases every resource and the GrGpu is disconnected with kCleanup.
void GrDirectContext::releaseResourcesAndAbandonContext() {
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/true);

    fMappedBufferManager.reset();

    fResourceProvider->abandon();

    // Release all resources in the backend 3D API.
    fResourceCache->releaseAll();

    fGpu->disconnect(GrGpu::DisconnectType::kCleanup);
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillips6db27c22019-05-01 10:43:56 -0400159
// Flushes pending work, then frees everything not locked by an in-flight use:
// atlases, glyph caches, drawing-manager resources, and finally all unlocked
// entries in the resource cache.
void GrDirectContext::freeGpuResources() {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->flushAndSubmit();
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();

    // TODO: the glyph cache doesn't hold any GpuResources so this call should not be needed here.
    // Some slack in the GrTextBlob's implementation requires it though. That could be fixed.
    fStrikeCache->freeAll();

    this->drawingManager()->freeGpuResources();

    fResourceCache->purgeAllUnlocked();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500181
// Second-stage construction. Wires the GPU caps into the thread-safe proxy,
// runs base-class init, then builds the per-context objects: strike cache,
// resource cache/provider, mapped-buffer manager, optional task group, and
// the glyph atlas manager. Returns false if there is no GrGpu or base init
// fails.
bool GrDirectContext::init() {
    ASSERT_SINGLE_OWNER
    if (!fGpu) {
        return false;
    }

    // The proxy needs the caps before INHERITED::init() runs.
    fThreadSafeProxy->priv().init(fGpu->refCaps());
    if (!INHERITED::init()) {
        return false;
    }

    SkASSERT(this->getTextBlobCache());
    SkASSERT(this->threadSafeCache());

    fStrikeCache = std::make_unique<GrStrikeCache>();
    fResourceCache = std::make_unique<GrResourceCache>(this->singleOwner(), this->contextID());
    fResourceCache->setProxyProvider(this->proxyProvider());
    fResourceCache->setThreadSafeCache(this->threadSafeCache());
    fResourceProvider = std::make_unique<GrResourceProvider>(fGpu.get(), fResourceCache.get(),
                                                             this->singleOwner());
    fMappedBufferManager = std::make_unique<GrClientMappedBufferManager>(this->contextID());

    fDidTestPMConversions = false;

    // DDL TODO: we need to think through how the task group & persistent cache
    // get passed on to/shared between all the DDLRecorders created with this context.
    if (this->options().fExecutor) {
        fTaskGroup = std::make_unique<SkTaskGroup>(*this->options().fExecutor);
    }

    fPersistentCache = this->options().fPersistentCache;
    fShaderErrorHandler = this->options().fShaderErrorHandler;
    if (!fShaderErrorHandler) {
        fShaderErrorHandler = GrShaderUtils::DefaultShaderErrorHandler();
    }

    GrDrawOpAtlas::AllowMultitexturing allowMultitexturing;
    if (GrContextOptions::Enable::kNo == this->options().fAllowMultipleGlyphCacheTextures ||
        // multitexturing supported only if range can represent the index + texcoords fully
        !(this->caps()->shaderCaps()->floatIs32Bits() ||
          this->caps()->shaderCaps()->integerSupport())) {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kNo;
    } else {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kYes;
    }

    GrProxyProvider* proxyProvider = this->priv().proxyProvider();

    fAtlasManager = std::make_unique<GrAtlasManager>(proxyProvider,
                                                     this->options().fGlyphCacheTextureMaximumBytes,
                                                     allowMultitexturing);
    // The atlas manager participates in every flush.
    this->priv().addOnFlushCallbackObject(fAtlasManager.get());

    return true;
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500237
Adlai Holler3a508e92020-10-12 13:58:01 -0400238void GrDirectContext::getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const {
239 ASSERT_SINGLE_OWNER
240
241 if (resourceCount) {
242 *resourceCount = fResourceCache->getBudgetedResourceCount();
243 }
244 if (resourceBytes) {
245 *resourceBytes = fResourceCache->getBudgetedResourceBytes();
246 }
247}
248
249size_t GrDirectContext::getResourceCachePurgeableBytes() const {
250 ASSERT_SINGLE_OWNER
251 return fResourceCache->getPurgeableBytes();
252}
253
254void GrDirectContext::getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const {
255 ASSERT_SINGLE_OWNER
256 if (maxResources) {
257 *maxResources = -1;
258 }
259 if (maxResourceBytes) {
260 *maxResourceBytes = this->getResourceCacheLimit();
261 }
262}
263
264size_t GrDirectContext::getResourceCacheLimit() const {
265 ASSERT_SINGLE_OWNER
266 return fResourceCache->getMaxResourceBytes();
267}
268
// Legacy setter: the resource-count argument is ignored; only the byte limit
// is applied.
void GrDirectContext::setResourceCacheLimits(int unused, size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    this->setResourceCacheLimit(maxResourceBytes);
}
273
// Sets the cache's byte budget; enforcement happens inside the cache.
void GrDirectContext::setResourceCacheLimit(size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    fResourceCache->setLimit(maxResourceBytes);
}
278
// Purges unlocked resources (optionally only scratch ones), then lets the
// cache trim itself back under budget.
void GrDirectContext::purgeUnlockedResources(bool scratchResourcesOnly) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(scratchResourcesOnly);
    fResourceCache->purgeAsNeeded();

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
293
// Time-based cleanup: drops resources (and CCPR cache entries) untouched for
// at least 'msNotUsed', after first pumping async-work completion and the
// mapped-buffer queue so their resources are eligible.
void GrDirectContext::performDeferredCleanup(std::chrono::milliseconds msNotUsed) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->checkAsyncWorkCompletion();
    fMappedBufferManager->process();
    // Anything last used before this instant is considered stale.
    auto purgeTime = GrStdSteadyClock::now() - msNotUsed;

    fResourceCache->purgeAsNeeded();
    fResourceCache->purgeResourcesNotUsedSince(purgeTime);

    if (auto ccpr = this->drawingManager()->getCoverageCountingPathRenderer()) {
        ccpr->purgeCacheEntriesOlderThan(this->proxyProvider(), purgeTime);
    }

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
318
319void GrDirectContext::purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources) {
320 ASSERT_SINGLE_OWNER
321
322 if (this->abandoned()) {
323 return;
324 }
325
326 fResourceCache->purgeUnlockedResources(bytesToPurge, preferScratchResources);
327}
328
Adlai Holler3acc69a2020-10-13 08:20:51 -0400329////////////////////////////////////////////////////////////////////////////////
330bool GrDirectContext::wait(int numSemaphores, const GrBackendSemaphore waitSemaphores[],
331 bool deleteSemaphoresAfterWait) {
332 if (!fGpu || fGpu->caps()->semaphoreSupport()) {
333 return false;
334 }
335 GrWrapOwnership ownership =
336 deleteSemaphoresAfterWait ? kAdopt_GrWrapOwnership : kBorrow_GrWrapOwnership;
337 for (int i = 0; i < numSemaphores; ++i) {
338 std::unique_ptr<GrSemaphore> sema = fResourceProvider->wrapBackendSemaphore(
339 waitSemaphores[i], GrResourceProvider::SemaphoreWrapType::kWillWait, ownership);
340 // If we failed to wrap the semaphore it means the client didn't give us a valid semaphore
341 // to begin with. Therefore, it is fine to not wait on it.
342 if (sema) {
343 fGpu->waitSemaphore(sema.get());
344 }
345 }
346 return true;
347}
Adlai Holler4aa4c602020-10-12 13:58:52 -0400348
// Lazily creates the small-path atlas manager (registering it as an on-flush
// callback on first use). Returns null if its atlas cannot be initialized for
// this context's caps.
GrSmallPathAtlasMgr* GrDirectContext::onGetSmallPathAtlasMgr() {
    if (!fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr = std::make_unique<GrSmallPathAtlasMgr>();

        this->priv().addOnFlushCallbackObject(fSmallPathAtlasMgr.get());
    }

    // initAtlas is invoked on every call, not just the creating one.
    if (!fSmallPathAtlasMgr->initAtlas(this->proxyProvider(), this->caps())) {
        return nullptr;
    }

    return fSmallPathAtlasMgr.get();
}
362
Adlai Holler3acc69a2020-10-13 08:20:51 -0400363////////////////////////////////////////////////////////////////////////////////
364
// Flushes all work to the GPU. When abandoned, the client's finished/submitted
// procs are still invoked (submitted=false) so callbacks are never leaked,
// and kNo is returned. Otherwise defers to the drawing manager with no
// specific target surfaces.
GrSemaphoresSubmitted GrDirectContext::flush(const GrFlushInfo& info) {
    ASSERT_SINGLE_OWNER
    if (this->abandoned()) {
        if (info.fFinishedProc) {
            info.fFinishedProc(info.fFinishedContext);
        }
        if (info.fSubmittedProc) {
            info.fSubmittedProc(info.fSubmittedContext, false);
        }
        return GrSemaphoresSubmitted::kNo;
    }

    return this->drawingManager()->flushSurfaces({}, SkSurface::BackendSurfaceAccess::kNoAccess,
                                                 info, nullptr);
}
380
381bool GrDirectContext::submit(bool syncCpu) {
382 ASSERT_SINGLE_OWNER
383 if (this->abandoned()) {
384 return false;
385 }
386
387 if (!fGpu) {
388 return false;
389 }
390
391 return fGpu->submitToGpu(syncCpu);
392}
393
394////////////////////////////////////////////////////////////////////////////////
395
// Polls the GrGpu so finished-proc callbacks for completed work can fire.
// No-op once the GPU is gone.
void GrDirectContext::checkAsyncWorkCompletion() {
    if (fGpu) {
        fGpu->checkFinishProcs();
    }
}
401
// Blocks until all outstanding GPU work completes, then pumps the
// finished-proc queue. Skipped when abandoned unless the caller explicitly
// opts in (teardown paths that must drain regardless).
void GrDirectContext::syncAllOutstandingGpuWork(bool shouldExecuteWhileAbandoned) {
    if (fGpu && (!this->abandoned() || shouldExecuteWhileAbandoned)) {
        fGpu->finishOutstandingGpuWork();
        this->checkAsyncWorkCompletion();
    }
}
408
Adlai Holler3acc69a2020-10-13 08:20:51 -0400409////////////////////////////////////////////////////////////////////////////////
410
// Asks the GrGpu to persist pipeline-cache data (presumably meaningful only
// on the Vulkan backend — the GrGpu implementation decides).
void GrDirectContext::storeVkPipelineCacheData() {
    if (fGpu) {
        fGpu->storeVkPipelineCacheData();
    }
}
416
417////////////////////////////////////////////////////////////////////////////////
418
419bool GrDirectContext::supportsDistanceFieldText() const {
420 return this->caps()->shaderCaps()->supportsDistanceFieldText();
421}
422
423//////////////////////////////////////////////////////////////////////////////
424
// Dumps resource-cache statistics plus the text-blob cache's byte footprint
// into the client-provided SkTraceMemoryDump.
void GrDirectContext::dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const {
    ASSERT_SINGLE_OWNER
    fResourceCache->dumpMemoryStatistics(traceMemoryDump);
    traceMemoryDump->dumpNumericValue("skia/gr_text_blob_cache", "size", "bytes",
                                      this->getTextBlobCache()->usedBytes());
}
431
// Estimates the GPU memory used by a texture-backed image; returns 0 for
// non-texture images or when no proxy is instantiated. Counts a single color
// sample per pixel; mips and next-pow2 padding are folded in by
// GrSurface::ComputeSize.
size_t GrDirectContext::ComputeImageSize(sk_sp<SkImage> image, GrMipmapped mipMapped,
                                         bool useNextPow2) {
    if (!image->isTextureBacked()) {
        return 0;
    }
    SkImage_GpuBase* gpuImage = static_cast<SkImage_GpuBase*>(as_IB(image.get()));
    GrTextureProxy* proxy = gpuImage->peekProxy();
    if (!proxy) {
        return 0;
    }

    int colorSamplesPerPixel = 1;
    return GrSurface::ComputeSize(proxy->backendFormat(), image->dimensions(),
                                  colorSamplesPerPixel, mipMapped, useNextPow2);
}
447
// Creates an uninitialized backend texture with the given format/flags.
// Returns an invalid GrBackendTexture if the context is abandoned.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    return fGpu->createBackendTexture({width, height}, backendFormat, renderable,
                                      mipMapped, isProtected);
}
461
// Convenience overload: maps the SkColorType to this context's default
// backend format and defers to the format-based overload.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    const GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);

    return this->createBackendTexture(width, height, format, mipMapped, renderable, isProtected);
}
475
// Helper: creates a backend texture, then initializes its contents from
// 'data'. If the update fails the freshly created texture is deleted so the
// caller never receives a half-initialized handle.
static GrBackendTexture create_and_update_backend_texture(
        GrDirectContext* dContext,
        SkISize dimensions,
        const GrBackendFormat& backendFormat,
        GrMipmapped mipMapped,
        GrRenderable renderable,
        GrProtected isProtected,
        sk_sp<GrRefCntedCallback> finishedCallback,
        const GrGpu::BackendTextureData* data) {
    GrGpu* gpu = dContext->priv().getGpu();

    GrBackendTexture beTex = gpu->createBackendTexture(dimensions, backendFormat, renderable,
                                                       mipMapped, isProtected);
    if (!beTex.isValid()) {
        return {};
    }

    if (!dContext->priv().getGpu()->updateBackendTexture(beTex,
                                                         std::move(finishedCallback),
                                                         data)) {
        dContext->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
501
// Uploads 'numLevels' mip levels to 'backendTexture'. For bottom-left-origin
// textures every level is first flipped vertically (via GrConvertPixels) into
// one packed temporary allocation so the upload receives top-down rows; for
// top-left origin the caller's pixmaps are wrapped directly.
static bool update_texture_with_pixmaps(GrGpu* gpu,
                                        const SkPixmap* srcData,
                                        int numLevels,
                                        const GrBackendTexture& backendTexture,
                                        GrSurfaceOrigin textureOrigin,
                                        sk_sp<GrRefCntedCallback> finishedCallback) {
    std::unique_ptr<char[]> tempStorage;
    SkAutoSTArray<15, GrPixmap> tempPixmaps(numLevels);
    if (textureOrigin == kBottomLeft_GrSurfaceOrigin) {
        // First pass: total bytes needed for all levels at minimal row bytes.
        size_t size = 0;
        for (int i = 0; i < numLevels; ++i) {
            size += srcData[i].info().minRowBytes()*srcData[i].height();
        }
        tempStorage.reset(new char[size]);
        // Second pass: flip each level into its slice of the temp buffer;
        // 'size' now tracks the running offset.
        size = 0;
        for (int i = 0; i < numLevels; ++i) {
            size_t tempRB = srcData[i].info().minRowBytes();
            tempPixmaps[i] = {srcData[i].info(), tempStorage.get() + size, tempRB};
            SkAssertResult(GrConvertPixels(tempPixmaps[i], srcData[i], /*flip*/ true));
            size += tempRB*srcData[i].height();
        }
    } else {
        for (int i = 0; i < numLevels; ++i) {
            tempPixmaps[i] = srcData[i];
        }
    }
    GrGpu::BackendTextureData data(tempPixmaps.get());
    return gpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
531
// Creates a backend texture initialized to 'color'. The finished-proc is
// wrapped before the abandoned check so the client callback is still honored
// on the early-out (the ref-counted wrapper presumably fires it when dropped
// — confirm against GrRefCntedCallback).
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_backend_texture(this, {width, height},
                                             backendFormat, mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
552
// As the format-based overload, but derives the backend format from the
// SkColorType and pre-applies that format's write swizzle to the clear color.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
    if (!format.isValid()) {
        return {};
    }

    GrColorType grColorType = SkColorTypeToGrColorType(skColorType);
    SkColor4f swizzledColor = this->caps()->getWriteSwizzle(format, grColorType).applyTo(color);

    GrGpu::BackendTextureData data(swizzledColor);
    return create_and_update_backend_texture(this, {width, height}, format,
                                             mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
580
// Creates a backend texture initialized from the provided pixmap levels. The
// caller must pass either exactly 1 level (no mips) or the full mip chain for
// the base dimensions; anything else is rejected. The texture is created
// uninitialized first, then filled; on upload failure it is deleted so no
// orphaned backend object leaks.
GrBackendTexture GrDirectContext::createBackendTexture(const SkPixmap srcData[],
                                                       int numProvidedLevels,
                                                       GrSurfaceOrigin textureOrigin,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    if (!srcData || numProvidedLevels <= 0) {
        return {};
    }

    // Level 0 fixes the dimensions and color type for the whole chain.
    int baseWidth = srcData[0].width();
    int baseHeight = srcData[0].height();
    SkColorType colorType = srcData[0].colorType();

    GrMipmapped mipMapped = GrMipmapped::kNo;
    int numExpectedLevels = 1;
    if (numProvidedLevels > 1) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(baseWidth, baseHeight) + 1;
        mipMapped = GrMipmapped::kYes;
    }

    if (numProvidedLevels != numExpectedLevels) {
        return {};
    }

    GrBackendFormat backendFormat = this->defaultBackendFormat(colorType, renderable);
    GrBackendTexture beTex = this->createBackendTexture(srcData[0].width(),
                                                        srcData[0].height(),
                                                        backendFormat,
                                                        mipMapped,
                                                        renderable,
                                                        isProtected);
    if (!beTex.isValid()) {
        return {};
    }
    if (!update_texture_with_pixmaps(this->priv().getGpu(),
                                     srcData,
                                     numProvidedLevels,
                                     beTex,
                                     textureOrigin,
                                     std::move(finishedCallback))) {
        this->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
636
// Overwrites an existing backend texture with a solid color. Fails (false)
// when the context is abandoned; the wrapped finished-proc is created first
// so the client callback is not dropped on that path.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrGpu::BackendTextureData data(color);
    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
650
// Solid-color update interpreted through 'skColorType': the color type must
// be compatible with the texture's backend format, and the format's write
// swizzle is applied to the color before upload.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           SkColorType skColorType,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrBackendFormat format = backendTexture.getBackendFormat();
    GrColorType grColorType = SkColorTypeAndFormatToGrColorType(this->caps(), skColorType, format);

    if (!this->caps()->areColorTypeAndFormatCompatible(grColorType, format)) {
        return false;
    }

    GrSwizzle swizzle = this->caps()->getWriteSwizzle(format, grColorType);
    GrGpu::BackendTextureData data(swizzle.applyTo(color));

    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
674
// Re-uploads pixel data into an existing backend texture. The level count
// must match the texture's mip state: 1, or the full chain computed from the
// texture's own dimensions.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkPixmap srcData[],
                                           int numLevels,
                                           GrSurfaceOrigin textureOrigin,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    if (!srcData || numLevels <= 0) {
        return false;
    }

    int numExpectedLevels = 1;
    if (backendTexture.hasMipmaps()) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(backendTexture.width(),
                                                        backendTexture.height()) + 1;
    }
    if (numLevels != numExpectedLevels) {
        return false;
    }
    return update_texture_with_pixmaps(fGpu.get(),
                                       srcData,
                                       numLevels,
                                       backendTexture,
                                       textureOrigin,
                                       std::move(finishedCallback));
}
706
Adlai Holler64e13832020-10-13 08:21:56 -0400707//////////////////////////////////////////////////////////////////////////////
708
// Compressed-texture twin of create_and_update_backend_texture: create, then
// initialize from 'data', deleting the texture if initialization fails.
static GrBackendTexture create_and_update_compressed_backend_texture(
        GrDirectContext* dContext,
        SkISize dimensions,
        const GrBackendFormat& backendFormat,
        GrMipmapped mipMapped,
        GrProtected isProtected,
        sk_sp<GrRefCntedCallback> finishedCallback,
        const GrGpu::BackendTextureData* data) {
    GrGpu* gpu = dContext->priv().getGpu();

    GrBackendTexture beTex = gpu->createCompressedBackendTexture(dimensions, backendFormat,
                                                                 mipMapped, isProtected);
    if (!beTex.isValid()) {
        return {};
    }

    if (!dContext->priv().getGpu()->updateCompressedBackendTexture(
                beTex, std::move(finishedCallback), data)) {
        dContext->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
732
// Creates a compressed backend texture filled with 'color'. Returns an
// invalid texture when abandoned.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const SkColor4f& color,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}
752
// Convenience overload: maps the compression type to its backend format,
// then defers to the format-based color overload.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 SkImage::CompressionType compression,
                                                                 const SkColor4f& color,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    GrBackendFormat format = this->compressedBackendFormat(compression);
    return this->createCompressedBackendTexture(width, height, format, color,
                                                mipMapped, isProtected, finishedProc,
                                                finishedContext);
}
766
// Creates a compressed backend texture initialized from raw compressed bytes
// ('compressedData'/'dataSize'); invalid result when abandoned.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const void* compressedData,
                                                                 size_t dataSize,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(compressedData, dataSize);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}
787
// Convenience overload: maps the compression type to its backend format,
// then defers to the format-based data overload.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 SkImage::CompressionType compression,
                                                                 const void* data, size_t dataSize,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    GrBackendFormat format = this->compressedBackendFormat(compression);
    return this->createCompressedBackendTexture(width, height, format, data, dataSize, mipMapped,
                                                isProtected, finishedProc, finishedContext);
}
800
// Overwrites an existing compressed backend texture with a solid color;
// false when the context is abandoned.
bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
                                                     const SkColor4f& color,
                                                     GrGpuFinishedProc finishedProc,
                                                     GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrGpu::BackendTextureData data(color);
    return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
814
815bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
816 const void* compressedData,
817 size_t dataSize,
818 GrGpuFinishedProc finishedProc,
819 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500820 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400821
822 if (this->abandoned()) {
823 return false;
824 }
825
826 if (!compressedData) {
827 return false;
828 }
829
830 GrGpu::BackendTextureData data(compressedData, dataSize);
831
832 return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
833}
834
Adlai Holler6d0745b2020-10-13 13:29:00 -0400835//////////////////////////////////////////////////////////////////////////////
836
837bool GrDirectContext::setBackendTextureState(const GrBackendTexture& backendTexture,
838 const GrBackendSurfaceMutableState& state,
839 GrBackendSurfaceMutableState* previousState,
840 GrGpuFinishedProc finishedProc,
841 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500842 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400843
844 if (this->abandoned()) {
845 return false;
846 }
847
848 return fGpu->setBackendTextureState(backendTexture, state, previousState, std::move(callback));
849}
850
851
852bool GrDirectContext::setBackendRenderTargetState(const GrBackendRenderTarget& backendRenderTarget,
853 const GrBackendSurfaceMutableState& state,
854 GrBackendSurfaceMutableState* previousState,
855 GrGpuFinishedProc finishedProc,
856 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500857 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400858
859 if (this->abandoned()) {
860 return false;
861 }
862
863 return fGpu->setBackendRenderTargetState(backendRenderTarget, state, previousState,
864 std::move(callback));
865}
866
867void GrDirectContext::deleteBackendTexture(GrBackendTexture backendTex) {
868 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
869 // For the Vulkan backend we still must destroy the backend texture when the context is
870 // abandoned.
871 if ((this->abandoned() && this->backend() != GrBackendApi::kVulkan) || !backendTex.isValid()) {
872 return;
873 }
874
875 fGpu->deleteBackendTexture(backendTex);
876}
877
878//////////////////////////////////////////////////////////////////////////////
879
// Forwards a (key, data) shader-cache entry to the backend GPU for precompilation and
// returns the backend's result. Key/data semantics are defined by fGpu's implementation.
bool GrDirectContext::precompileShader(const SkData& key, const SkData& data) {
    return fGpu->precompileShader(key, data);
}
883
884#ifdef SK_ENABLE_DUMP_GPU
885#include "include/core/SkString.h"
886#include "src/utils/SkJSONWriter.h"
// Serializes the context's state — backend name, caps, GPU, and context JSON dumps —
// into a single pretty-printed, NUL-terminated JSON string. Only compiled when
// SK_ENABLE_DUMP_GPU is defined (see the surrounding #ifdef).
SkString GrDirectContext::dump() const {
    SkDynamicMemoryWStream stream;
    SkJSONWriter writer(&stream, SkJSONWriter::Mode::kPretty);
    writer.beginObject();

    writer.appendString("backend", GrBackendApiToStr(this->backend()));

    writer.appendName("caps");
    this->caps()->dumpJSON(&writer);

    writer.appendName("gpu");
    this->fGpu->dumpJSON(&writer);

    writer.appendName("context");
    this->dumpJSON(&writer);

    // Flush JSON to the memory stream
    writer.endObject();
    writer.flush();

    // Null terminate the JSON data in the memory stream
    stream.write8(0);

    // Allocate a string big enough to hold all the data, then copy out of the stream
    // (bytesWritten() already includes the NUL just appended).
    SkString result(stream.bytesWritten());
    stream.copyToAndReset(result.writable_str());
    return result;
}
915#endif
916
John Rosascoa9b348f2019-11-08 13:18:15 -0800917#ifdef SK_GL
Robert Phillipsc7228c62020-07-14 12:57:39 -0400918
Robert Phillipsf4f80112020-07-13 16:13:31 -0400919/*************************************************************************************************/
920sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500921 GrContextOptions defaultOptions;
Jim Van Verth03b8ab22020-02-24 11:36:15 -0500922 return MakeGL(std::move(glInterface), defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -0500923}
924
Robert Phillipsf4f80112020-07-13 16:13:31 -0400925sk_sp<GrDirectContext> GrDirectContext::MakeGL(const GrContextOptions& options) {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400926 return MakeGL(nullptr, options);
927}
928
Robert Phillipsf4f80112020-07-13 16:13:31 -0400929sk_sp<GrDirectContext> GrDirectContext::MakeGL() {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400930 GrContextOptions defaultOptions;
931 return MakeGL(nullptr, defaultOptions);
932}
933
Brian Salomon24069eb2020-06-24 10:19:52 -0400934#if GR_TEST_UTILS
// Test-only (GR_TEST_UTILS): wraps a glGetError function so that roughly 1 in 300
// successful calls is instead reported as GR_GL_OUT_OF_MEMORY, to exercise the
// driver-OOM handling paths.
GrGLFunction<GrGLGetErrorFn> make_get_error_with_random_oom(GrGLFunction<GrGLGetErrorFn> original) {
    // A SkRandom and a GrGLFunction<GrGLGetErrorFn> are too big to be captured by a
    // GrGLFunction<GrGLGetError> (surprise, surprise). So we make a context object and
    // capture that by pointer. However, GrGLFunction doesn't support calling a destructor
    // on the thing it captures. So we leak the context.
    struct GetErrorContext {
        SkRandom fRandom;
        GrGLFunction<GrGLGetErrorFn> fGetError;
    };

    auto errorContext = new GetErrorContext;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
    // Tell LeakSanitizer about the deliberate leak so test runs stay clean.
    __lsan_ignore_object(errorContext);
#endif

    errorContext->fGetError = original;

    return GrGLFunction<GrGLGetErrorFn>([errorContext]() {
        // Only inject a synthetic OOM when the real call succeeded — never mask a
        // genuine GL error.
        GrGLenum error = errorContext->fGetError();
        if (error == GR_GL_NO_ERROR && (errorContext->fRandom.nextU() % 300) == 0) {
            error = GR_GL_OUT_OF_MEMORY;
        }
        return error;
    });
}
961#endif
962
Robert Phillipsf4f80112020-07-13 16:13:31 -0400963sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface,
964 const GrContextOptions& options) {
965 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kOpenGL, options));
Brian Salomon24069eb2020-06-24 10:19:52 -0400966#if GR_TEST_UTILS
967 if (options.fRandomGLOOM) {
968 auto copy = sk_make_sp<GrGLInterface>(*glInterface);
969 copy->fFunctions.fGetError =
970 make_get_error_with_random_oom(glInterface->fFunctions.fGetError);
971#if GR_GL_CHECK_ERROR
972 // Suppress logging GL errors since we'll be synthetically generating them.
973 copy->suppressErrorLogging();
974#endif
975 glInterface = std::move(copy);
976 }
977#endif
Robert Phillipsf4f80112020-07-13 16:13:31 -0400978 direct->fGpu = GrGLGpu::Make(std::move(glInterface), options, direct.get());
979 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500980 return nullptr;
981 }
Robert Phillipsf4f80112020-07-13 16:13:31 -0400982 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -0500983}
John Rosascoa9b348f2019-11-08 13:18:15 -0800984#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -0500985
Robert Phillipsf4f80112020-07-13 16:13:31 -0400986/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -0400987sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions) {
988 GrContextOptions defaultOptions;
989 return MakeMock(mockOptions, defaultOptions);
990}
991
992sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions,
993 const GrContextOptions& options) {
994 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMock, options));
995
996 direct->fGpu = GrMockGpu::Make(mockOptions, options, direct.get());
997 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500998 return nullptr;
999 }
Chris Daltona378b452019-12-11 13:24:11 -05001000
Robert Phillipsf4f80112020-07-13 16:13:31 -04001001 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001002}
1003
Greg Danielb4d89562018-10-03 18:44:49 +00001004#ifdef SK_VULKAN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001005/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001006sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext) {
1007 GrContextOptions defaultOptions;
1008 return MakeVulkan(backendContext, defaultOptions);
1009}
1010
1011sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext,
1012 const GrContextOptions& options) {
1013 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kVulkan, options));
1014
1015 direct->fGpu = GrVkGpu::Make(backendContext, options, direct.get());
1016 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001017 return nullptr;
1018 }
1019
Robert Phillipsf4f80112020-07-13 16:13:31 -04001020 return direct;
Greg Danielb4d89562018-10-03 18:44:49 +00001021}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001022#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001023
1024#ifdef SK_METAL
Robert Phillipsf4f80112020-07-13 16:13:31 -04001025/*************************************************************************************************/
Jim Van Verth351c9b52020-11-12 15:21:11 -05001026sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001027 GrContextOptions defaultOptions;
Jim Van Verth351c9b52020-11-12 15:21:11 -05001028 return MakeMetal(backendContext, defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -05001029}
1030
Jim Van Verth351c9b52020-11-12 15:21:11 -05001031sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext,
1032 const GrContextOptions& options) {
Robert Phillipsf4f80112020-07-13 16:13:31 -04001033 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
Robert Phillipsa3457b82018-03-08 11:30:12 -05001034
Jim Van Verth351c9b52020-11-12 15:21:11 -05001035 direct->fGpu = GrMtlTrampoline::MakeGpu(backendContext, options, direct.get());
Robert Phillipsf4f80112020-07-13 16:13:31 -04001036 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001037 return nullptr;
1038 }
Timothy Liang4e85e802018-06-28 16:37:18 -04001039
Robert Phillipsf4f80112020-07-13 16:13:31 -04001040 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001041}
Jim Van Verth351c9b52020-11-12 15:21:11 -05001042
// deprecated: prefer the GrMtlBackendContext-based MakeMetal overloads above.
// Forwards the raw MTLDevice/MTLCommandQueue pointers with default context options.
sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue) {
    GrContextOptions defaultOptions;
    return MakeMetal(device, queue, defaultOptions);
}
1048
1049// deprecated
1050// remove include/gpu/mtl/GrMtlBackendContext.h, above, when removed
1051sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue,
1052 const GrContextOptions& options) {
1053 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
1054 GrMtlBackendContext backendContext = {};
1055 backendContext.fDevice.reset(device);
1056 backendContext.fQueue.reset(queue);
1057
1058 return GrDirectContext::MakeMetal(backendContext, options);
1059}
Robert Phillipsa3457b82018-03-08 11:30:12 -05001060#endif
1061
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001062#ifdef SK_DIRECT3D
Robert Phillipsf4f80112020-07-13 16:13:31 -04001063/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001064sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext) {
1065 GrContextOptions defaultOptions;
1066 return MakeDirect3D(backendContext, defaultOptions);
1067}
1068
1069sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext,
1070 const GrContextOptions& options) {
1071 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDirect3D, options));
1072
1073 direct->fGpu = GrD3DGpu::Make(backendContext, options, direct.get());
1074 if (!direct->init()) {
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -05001075 return nullptr;
1076 }
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001077
Robert Phillipsf4f80112020-07-13 16:13:31 -04001078 return direct;
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001079}
1080#endif
1081
Stephen White985741a2019-07-18 11:43:45 -04001082#ifdef SK_DAWN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001083/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001084sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device) {
Stephen White985741a2019-07-18 11:43:45 -04001085 GrContextOptions defaultOptions;
1086 return MakeDawn(device, defaultOptions);
1087}
1088
Robert Phillipsf4f80112020-07-13 16:13:31 -04001089sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device,
1090 const GrContextOptions& options) {
1091 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDawn, options));
Stephen White985741a2019-07-18 11:43:45 -04001092
Robert Phillipsf4f80112020-07-13 16:13:31 -04001093 direct->fGpu = GrDawnGpu::Make(device, options, direct.get());
1094 if (!direct->init()) {
Stephen White985741a2019-07-18 11:43:45 -04001095 return nullptr;
1096 }
1097
Robert Phillipsf4f80112020-07-13 16:13:31 -04001098 return direct;
Stephen White985741a2019-07-18 11:43:45 -04001099}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001100
Stephen White985741a2019-07-18 11:43:45 -04001101#endif