blob: f67b6e393737be9819d247e147de384e3ebb7809 [file] [log] [blame]
Robert Phillipsa3457b82018-03-08 11:30:12 -05001/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Greg Daniel54bfb182018-11-20 17:12:36 -05008
Robert Phillipsb7bfbc22020-07-01 12:55:01 -04009#include "include/gpu/GrDirectContext.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050010
Adlai Holler3acc69a2020-10-13 08:20:51 -040011#include "include/core/SkTraceMemoryDump.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050012#include "include/gpu/GrContextThreadSafeProxy.h"
Adlai Holler9555f292020-10-09 09:41:14 -040013#include "src/core/SkTaskGroup.h"
14#include "src/gpu/GrClientMappedBufferManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050015#include "src/gpu/GrContextThreadSafeProxyPriv.h"
Adlai Hollera0693042020-10-14 11:23:11 -040016#include "src/gpu/GrDirectContextPriv.h"
Adlai Holler4aa4c602020-10-12 13:58:52 -040017#include "src/gpu/GrDrawingManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/gpu/GrGpu.h"
Adlai Holler9555f292020-10-09 09:41:14 -040019#include "src/gpu/GrResourceProvider.h"
20#include "src/gpu/GrShaderUtils.h"
Adlai Holler3acc69a2020-10-13 08:20:51 -040021#include "src/image/SkImage_GpuBase.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050022
Adlai Holler4aa4c602020-10-12 13:58:52 -040023#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050024#include "src/gpu/effects/GrSkSLFP.h"
25#include "src/gpu/gl/GrGLGpu.h"
26#include "src/gpu/mock/GrMockGpu.h"
Robert Phillips5edf5102020-08-10 16:30:36 -040027#include "src/gpu/ops/GrSmallPathAtlasMgr.h"
Robert Phillipse19babf2020-04-06 13:57:30 -040028#include "src/gpu/text/GrAtlasManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050029#include "src/gpu/text/GrStrikeCache.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050030#ifdef SK_METAL
Jim Van Verth351c9b52020-11-12 15:21:11 -050031#include "include/gpu/mtl/GrMtlBackendContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050032#include "src/gpu/mtl/GrMtlTrampoline.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050033#endif
34#ifdef SK_VULKAN
Mike Kleinc0bd9f92019-04-23 12:05:21 -050035#include "src/gpu/vk/GrVkGpu.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050036#endif
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -050037#ifdef SK_DIRECT3D
38#include "src/gpu/d3d/GrD3DGpu.h"
39#endif
Stephen White985741a2019-07-18 11:43:45 -040040#ifdef SK_DAWN
Mike Klein52337de2019-07-25 09:00:52 -050041#include "src/gpu/dawn/GrDawnGpu.h"
Stephen White985741a2019-07-18 11:43:45 -040042#endif
Adlai Holler6d0745b2020-10-13 13:29:00 -040043#include <memory>
Robert Phillipsa3457b82018-03-08 11:30:12 -050044
Brian Salomon24069eb2020-06-24 10:19:52 -040045#if GR_TEST_UTILS
46# include "include/utils/SkRandom.h"
47# if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
48# include <sanitizer/lsan_interface.h>
49# endif
50#endif
51
Adlai Holler9555f292020-10-09 09:41:14 -040052#define ASSERT_SINGLE_OWNER GR_ASSERT_SINGLE_OWNER(this->singleOwner())
53
// Constructs a direct (GPU-backed) context for the given backend API.
// Only the thread-safe proxy is created here; all backend-dependent state
// (GrGpu, caches, providers) is set up later in init().
GrDirectContext::GrDirectContext(GrBackendApi backend, const GrContextOptions& options)
        : INHERITED(GrContextThreadSafeProxyPriv::Make(backend, options)) {
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050057
// Tears the context down. Pending GPU work is flushed and submitted first so
// resources are not destroyed while still in use; the destruction order below
// is deliberate and should not be reordered.
GrDirectContext::~GrDirectContext() {
    ASSERT_SINGLE_OWNER
    // this if-test protects against the case where the context is being destroyed
    // before having been fully created
    if (fGpu) {
        this->flushAndSubmit();
    }

    this->destroyDrawingManager();
    fMappedBufferManager.reset();

    // Ideally we could just let the ptr drop, but resource cache queries this ptr in releaseAll.
    if (fResourceCache) {
        fResourceCache->releaseAll();
    }
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050074
// Re-exposes the base-class thread-safe proxy accessor as part of the
// GrDirectContext public API.
sk_sp<GrContextThreadSafeProxy> GrDirectContext::threadSafeProxy() {
    return INHERITED::threadSafeProxy();
}
78
Adlai Hollera7a40442020-10-09 09:49:42 -040079void GrDirectContext::resetGLTextureBindings() {
80 if (this->abandoned() || this->backend() != GrBackendApi::kOpenGL) {
81 return;
82 }
83 fGpu->resetTextureBindings();
84}
85
// Marks portions of the backend 3D API context state dirty (bitmask in
// `state`) so Skia re-sends that state before its next use of the API.
void GrDirectContext::resetContext(uint32_t state) {
    ASSERT_SINGLE_OWNER
    fGpu->markContextDirty(state);
}
90
// Abandons the context: no further GPU work will be issued and backend
// resources are NOT freed through the 3D API (see
// releaseResourcesAndAbandonContext() for the releasing variant).
// The ordering below matters: caches are abandoned before the GrGpu is
// disconnected so nothing tries to talk to the dead API afterwards.
void GrDirectContext::abandonContext() {
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    fStrikeCache->freeAll();

    fMappedBufferManager->abandon();

    fResourceProvider->abandon();

    // abandon first so destructors don't try to free the resources in the API.
    fResourceCache->abandonAll();

    fGpu->disconnect(GrGpu::DisconnectType::kAbandon);

    fMappedBufferManager.reset();
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500115
Adlai Hollera7a40442020-10-09 09:49:42 -0400116bool GrDirectContext::abandoned() {
117 if (INHERITED::abandoned()) {
118 return true;
119 }
120
121 if (fGpu && fGpu->isDeviceLost()) {
122 this->abandonContext();
123 return true;
124 }
125 return false;
126}
127
Adlai Holler61a591c2020-10-12 12:38:33 -0400128bool GrDirectContext::oomed() { return fGpu ? fGpu->checkAndResetOOMed() : false; }
129
// Like abandonContext(), but first releases all resources back to the backend
// 3D API (kCleanup disconnect instead of kAbandon). Ordering is deliberate.
void GrDirectContext::releaseResourcesAndAbandonContext() {
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    fMappedBufferManager.reset();

    fResourceProvider->abandon();

    // Release all resources in the backend 3D API.
    fResourceCache->releaseAll();

    fGpu->disconnect(GrGpu::DisconnectType::kCleanup);
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillips6db27c22019-05-01 10:43:56 -0400150
// Frees all GPU resources that are not currently locked/in-use. Flushes and
// submits pending work first so resources referenced by in-flight commands
// are not purged out from under the GPU.
void GrDirectContext::freeGpuResources() {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->flushAndSubmit();
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();

    // TODO: the glyph cache doesn't hold any GpuResources so this call should not be needed here.
    // Some slack in the GrTextBlob's implementation requires it though. That could be fixed.
    fStrikeCache->freeAll();

    this->drawingManager()->freeGpuResources();

    fResourceCache->purgeAllUnlocked();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500172
// Second-stage initialization, run after the backend GrGpu has been created.
// Wires caps into the thread-safe proxy, builds the caches/providers, and
// configures glyph-atlas multitexturing. Returns false if the context could
// not be initialized (no GrGpu, or base-class init failure).
bool GrDirectContext::init() {
    ASSERT_SINGLE_OWNER
    if (!fGpu) {
        return false;
    }

    // The proxy needs the caps before INHERITED::init() runs.
    fThreadSafeProxy->priv().init(fGpu->refCaps());
    if (!INHERITED::init()) {
        return false;
    }

    SkASSERT(this->getTextBlobCache());
    SkASSERT(this->threadSafeCache());

    fStrikeCache = std::make_unique<GrStrikeCache>();
    fResourceCache = std::make_unique<GrResourceCache>(this->singleOwner(), this->contextID());
    fResourceCache->setProxyProvider(this->proxyProvider());
    fResourceCache->setThreadSafeCache(this->threadSafeCache());
    fResourceProvider = std::make_unique<GrResourceProvider>(fGpu.get(), fResourceCache.get(),
                                                             this->singleOwner());
    fMappedBufferManager = std::make_unique<GrClientMappedBufferManager>(this->contextID());

    fDidTestPMConversions = false;

    // DDL TODO: we need to think through how the task group & persistent cache
    // get passed on to/shared between all the DDLRecorders created with this context.
    if (this->options().fExecutor) {
        fTaskGroup = std::make_unique<SkTaskGroup>(*this->options().fExecutor);
    }

    fPersistentCache = this->options().fPersistentCache;
    fShaderErrorHandler = this->options().fShaderErrorHandler;
    if (!fShaderErrorHandler) {
        fShaderErrorHandler = GrShaderUtils::DefaultShaderErrorHandler();
    }

    GrDrawOpAtlas::AllowMultitexturing allowMultitexturing;
    if (GrContextOptions::Enable::kNo == this->options().fAllowMultipleGlyphCacheTextures ||
        // multitexturing supported only if range can represent the index + texcoords fully
        !(this->caps()->shaderCaps()->floatIs32Bits() ||
          this->caps()->shaderCaps()->integerSupport())) {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kNo;
    } else {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kYes;
    }

    GrProxyProvider* proxyProvider = this->priv().proxyProvider();

    fAtlasManager = std::make_unique<GrAtlasManager>(proxyProvider,
                                                     this->options().fGlyphCacheTextureMaximumBytes,
                                                     allowMultitexturing);
    // The atlas manager must observe flushes to upload pending glyph data.
    this->priv().addOnFlushCallbackObject(fAtlasManager.get());

    return true;
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500228
Adlai Holler3a508e92020-10-12 13:58:01 -0400229void GrDirectContext::getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const {
230 ASSERT_SINGLE_OWNER
231
232 if (resourceCount) {
233 *resourceCount = fResourceCache->getBudgetedResourceCount();
234 }
235 if (resourceBytes) {
236 *resourceBytes = fResourceCache->getBudgetedResourceBytes();
237 }
238}
239
// Returns the number of bytes held by cache resources that could be purged
// immediately (unlocked and not in use).
size_t GrDirectContext::getResourceCachePurgeableBytes() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getPurgeableBytes();
}
244
245void GrDirectContext::getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const {
246 ASSERT_SINGLE_OWNER
247 if (maxResources) {
248 *maxResources = -1;
249 }
250 if (maxResourceBytes) {
251 *maxResourceBytes = this->getResourceCacheLimit();
252 }
253}
254
// Returns the maximum number of bytes the resource cache may hold.
size_t GrDirectContext::getResourceCacheLimit() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getMaxResourceBytes();
}
259
// Legacy setter: the resource-count limit (`unused`) is ignored; only the
// byte limit is applied.
void GrDirectContext::setResourceCacheLimits(int unused, size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    this->setResourceCacheLimit(maxResourceBytes);
}
264
// Sets the maximum number of bytes the resource cache may hold.
void GrDirectContext::setResourceCacheLimit(size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    fResourceCache->setLimit(maxResourceBytes);
}
269
// Purges unlocked resources from the cache; if `scratchResourcesOnly` is true
// only scratch (reusable, unassigned) resources are dropped.
void GrDirectContext::purgeUnlockedResources(bool scratchResourcesOnly) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(scratchResourcesOnly);
    fResourceCache->purgeAsNeeded();

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
284
// Time-based cleanup: frees resources that have not been used within
// `msNotUsed`, processes completed async work and mapped-buffer returns, and
// ages out CCPR path-cache entries and stale text blobs.
void GrDirectContext::performDeferredCleanup(std::chrono::milliseconds msNotUsed) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->checkAsyncWorkCompletion();
    fMappedBufferManager->process();
    auto purgeTime = GrStdSteadyClock::now() - msNotUsed;

    fResourceCache->purgeAsNeeded();
    fResourceCache->purgeResourcesNotUsedSince(purgeTime);

    if (auto ccpr = this->drawingManager()->getCoverageCountingPathRenderer()) {
        ccpr->purgeCacheEntriesOlderThan(this->proxyProvider(), purgeTime);
    }

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
309
310void GrDirectContext::purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources) {
311 ASSERT_SINGLE_OWNER
312
313 if (this->abandoned()) {
314 return;
315 }
316
317 fResourceCache->purgeUnlockedResources(bytesToPurge, preferScratchResources);
318}
319
Adlai Holler3acc69a2020-10-13 08:20:51 -0400320////////////////////////////////////////////////////////////////////////////////
321bool GrDirectContext::wait(int numSemaphores, const GrBackendSemaphore waitSemaphores[],
322 bool deleteSemaphoresAfterWait) {
323 if (!fGpu || fGpu->caps()->semaphoreSupport()) {
324 return false;
325 }
326 GrWrapOwnership ownership =
327 deleteSemaphoresAfterWait ? kAdopt_GrWrapOwnership : kBorrow_GrWrapOwnership;
328 for (int i = 0; i < numSemaphores; ++i) {
329 std::unique_ptr<GrSemaphore> sema = fResourceProvider->wrapBackendSemaphore(
330 waitSemaphores[i], GrResourceProvider::SemaphoreWrapType::kWillWait, ownership);
331 // If we failed to wrap the semaphore it means the client didn't give us a valid semaphore
332 // to begin with. Therefore, it is fine to not wait on it.
333 if (sema) {
334 fGpu->waitSemaphore(sema.get());
335 }
336 }
337 return true;
338}
Adlai Holler4aa4c602020-10-12 13:58:52 -0400339
// Lazily creates the small-path atlas manager (registering it as an on-flush
// callback on first use) and (re)initializes its atlas. Returns nullptr if
// the atlas could not be initialized.
GrSmallPathAtlasMgr* GrDirectContext::onGetSmallPathAtlasMgr() {
    if (!fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr = std::make_unique<GrSmallPathAtlasMgr>();

        this->priv().addOnFlushCallbackObject(fSmallPathAtlasMgr.get());
    }

    if (!fSmallPathAtlasMgr->initAtlas(this->proxyProvider(), this->caps())) {
        return nullptr;
    }

    return fSmallPathAtlasMgr.get();
}
353
Adlai Holler3acc69a2020-10-13 08:20:51 -0400354////////////////////////////////////////////////////////////////////////////////
355
// Flushes all work to the GPU command stream (without submitting). On an
// abandoned context the finished/submitted callbacks in `info` are still
// invoked (submitted=false) so clients can release their resources.
GrSemaphoresSubmitted GrDirectContext::flush(const GrFlushInfo& info) {
    ASSERT_SINGLE_OWNER
    if (this->abandoned()) {
        if (info.fFinishedProc) {
            info.fFinishedProc(info.fFinishedContext);
        }
        if (info.fSubmittedProc) {
            info.fSubmittedProc(info.fSubmittedContext, false);
        }
        return GrSemaphoresSubmitted::kNo;
    }

    return this->drawingManager()->flushSurfaces({}, SkSurface::BackendSurfaceAccess::kNoAccess,
                                                 info, nullptr);
}
371
372bool GrDirectContext::submit(bool syncCpu) {
373 ASSERT_SINGLE_OWNER
374 if (this->abandoned()) {
375 return false;
376 }
377
378 if (!fGpu) {
379 return false;
380 }
381
382 return fGpu->submitToGpu(syncCpu);
383}
384
385////////////////////////////////////////////////////////////////////////////////
386
387void GrDirectContext::checkAsyncWorkCompletion() {
388 if (fGpu) {
389 fGpu->checkFinishProcs();
390 }
391}
392
393////////////////////////////////////////////////////////////////////////////////
394
395void GrDirectContext::storeVkPipelineCacheData() {
396 if (fGpu) {
397 fGpu->storeVkPipelineCacheData();
398 }
399}
400
401////////////////////////////////////////////////////////////////////////////////
402
// Returns whether the backend's shader caps allow distance-field text rendering.
bool GrDirectContext::supportsDistanceFieldText() const {
    return this->caps()->shaderCaps()->supportsDistanceFieldText();
}
406
407//////////////////////////////////////////////////////////////////////////////
408
// Dumps resource-cache memory statistics plus the text-blob cache size into
// the provided SkTraceMemoryDump (e.g. for Chrome tracing).
void GrDirectContext::dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const {
    ASSERT_SINGLE_OWNER
    fResourceCache->dumpMemoryStatistics(traceMemoryDump);
    traceMemoryDump->dumpNumericValue("skia/gr_text_blob_cache", "size", "bytes",
                                      this->getTextBlobCache()->usedBytes());
}
415
416size_t GrDirectContext::ComputeImageSize(sk_sp<SkImage> image, GrMipmapped mipMapped,
417 bool useNextPow2) {
418 if (!image->isTextureBacked()) {
419 return 0;
420 }
421 SkImage_GpuBase* gpuImage = static_cast<SkImage_GpuBase*>(as_IB(image.get()));
422 GrTextureProxy* proxy = gpuImage->peekProxy();
423 if (!proxy) {
424 return 0;
425 }
426
427 int colorSamplesPerPixel = 1;
428 return GrSurface::ComputeSize(proxy->backendFormat(), image->dimensions(),
429 colorSamplesPerPixel, mipMapped, useNextPow2);
430}
431
// Creates an uninitialized backend texture with the given format. Returns an
// invalid GrBackendTexture if the context is abandoned or creation fails.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    return fGpu->createBackendTexture({width, height}, backendFormat, renderable,
                                      mipMapped, isProtected);
}
445
// Convenience overload: resolves the default backend format for `skColorType`
// and forwards to the format-based overload above.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    const GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);

    return this->createBackendTexture(width, height, format, mipMapped, renderable, isProtected);
}
459
// Helper: creates a backend texture and immediately initializes its contents
// from `data`. On update failure the freshly created texture is deleted so no
// backend resource leaks; returns an invalid texture on any failure.
static GrBackendTexture create_and_update_backend_texture(
        GrDirectContext* dContext,
        SkISize dimensions,
        const GrBackendFormat& backendFormat,
        GrMipmapped mipMapped,
        GrRenderable renderable,
        GrProtected isProtected,
        sk_sp<GrRefCntedCallback> finishedCallback,
        const GrGpu::BackendTextureData* data) {
    GrGpu* gpu = dContext->priv().getGpu();

    GrBackendTexture beTex = gpu->createBackendTexture(dimensions, backendFormat, renderable,
                                                       mipMapped, isProtected);
    if (!beTex.isValid()) {
        return {};
    }

    if (!dContext->priv().getGpu()->updateBackendTexture(beTex,
                                                         std::move(finishedCallback),
                                                         data)) {
        // Don't leak the texture if initialization failed.
        dContext->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
485
// Helper: uploads `numLevels` mip levels of pixel data into `backendTexture`.
// For bottom-left-origin textures each level is first flipped vertically into
// a single temporary allocation (tightly packed, levels laid out back-to-back);
// otherwise the source pixmaps are forwarded as-is.
static bool update_texture_with_pixmaps(GrGpu* gpu,
                                        const SkPixmap* srcData,
                                        int numLevels,
                                        const GrBackendTexture& backendTexture,
                                        GrSurfaceOrigin textureOrigin,
                                        sk_sp<GrRefCntedCallback> finishedCallback) {
    std::unique_ptr<char[]> tempStorage;
    SkAutoSTArray<15, GrPixmap> tempPixmaps(numLevels);
    if (textureOrigin == kBottomLeft_GrSurfaceOrigin) {
        // First pass: total size of all levels at minimal (tight) row bytes.
        size_t size = 0;
        for (int i = 0; i < numLevels; ++i) {
            size += srcData[i].info().minRowBytes()*srcData[i].height();
        }
        tempStorage.reset(new char[size]);
        // Second pass: `size` is reused as a running byte offset into the buffer.
        size = 0;
        for (int i = 0; i < numLevels; ++i) {
            size_t tempRB = srcData[i].info().minRowBytes();
            tempPixmaps[i] = {srcData[i].info(), tempStorage.get() + size, tempRB};
            SkAssertResult(GrConvertPixels(tempPixmaps[i], srcData[i], /*flip*/ true));
            size += tempRB*srcData[i].height();
        }
    } else {
        for (int i = 0; i < numLevels; ++i) {
            tempPixmaps[i] = srcData[i];
        }
    }
    GrGpu::BackendTextureData data(tempPixmaps.get());
    return gpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
515
// Creates a backend texture cleared to `color`. The finished callback is
// created up front (before the abandoned check) so the client is always
// notified, even when creation is skipped.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_backend_texture(this, {width, height},
                                             backendFormat, mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
536
// Creates a backend texture cleared to `color`, resolving the format from the
// SkColorType. The clear color is run through the write swizzle for the
// format/color-type pair so the stored channel order matches the format.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
    if (!format.isValid()) {
        return {};
    }

    GrColorType grColorType = SkColorTypeToGrColorType(skColorType);
    SkColor4f swizzledColor = this->caps()->getWriteSwizzle(format, grColorType).applyTo(color);

    GrGpu::BackendTextureData data(swizzledColor);
    return create_and_update_backend_texture(this, {width, height}, format,
                                             mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
564
// Creates a backend texture initialized from pixel data. `srcData` must hold
// either exactly 1 level or a full mip chain for the base dimensions; the
// color type and dimensions come from level 0. On upload failure the created
// texture is deleted and an invalid texture is returned.
GrBackendTexture GrDirectContext::createBackendTexture(const SkPixmap srcData[],
                                                       int numProvidedLevels,
                                                       GrSurfaceOrigin textureOrigin,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    if (!srcData || numProvidedLevels <= 0) {
        return {};
    }

    int baseWidth = srcData[0].width();
    int baseHeight = srcData[0].height();
    SkColorType colorType = srcData[0].colorType();

    // More than one level provided implies a complete mip chain is expected.
    GrMipmapped mipMapped = GrMipmapped::kNo;
    int numExpectedLevels = 1;
    if (numProvidedLevels > 1) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(baseWidth, baseHeight) + 1;
        mipMapped = GrMipmapped::kYes;
    }

    if (numProvidedLevels != numExpectedLevels) {
        return {};
    }

    GrBackendFormat backendFormat = this->defaultBackendFormat(colorType, renderable);
    GrBackendTexture beTex = this->createBackendTexture(srcData[0].width(),
                                                        srcData[0].height(),
                                                        backendFormat,
                                                        mipMapped,
                                                        renderable,
                                                        isProtected);
    if (!beTex.isValid()) {
        return {};
    }
    if (!update_texture_with_pixmaps(this->priv().getGpu(),
                                     srcData,
                                     numProvidedLevels,
                                     beTex,
                                     textureOrigin,
                                     std::move(finishedCallback))) {
        // Don't leak the texture if the pixel upload failed.
        this->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
620
// Overwrites the contents of `backendTexture` with a solid color (no swizzle
// applied — see the SkColorType overload for swizzled clears).
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrGpu::BackendTextureData data(color);
    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
634
// Overwrites the contents of `backendTexture` with a solid color, validating
// that `skColorType` is compatible with the texture's format and applying the
// matching write swizzle to the color first.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           SkColorType skColorType,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrBackendFormat format = backendTexture.getBackendFormat();
    GrColorType grColorType = SkColorTypeAndFormatToGrColorType(this->caps(), skColorType, format);

    if (!this->caps()->areColorTypeAndFormatCompatible(grColorType, format)) {
        return false;
    }

    GrSwizzle swizzle = this->caps()->getWriteSwizzle(format, grColorType);
    GrGpu::BackendTextureData data(swizzle.applyTo(color));

    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
658
// Overwrites the contents of `backendTexture` from pixel data. `numLevels`
// must be 1 for a non-mipmapped texture or the full mip-chain count for a
// mipmapped one; the flip/upload work is delegated to
// update_texture_with_pixmaps().
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkPixmap srcData[],
                                           int numLevels,
                                           GrSurfaceOrigin textureOrigin,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    if (!srcData || numLevels <= 0) {
        return false;
    }

    int numExpectedLevels = 1;
    if (backendTexture.hasMipmaps()) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(backendTexture.width(),
                                                        backendTexture.height()) + 1;
    }
    if (numLevels != numExpectedLevels) {
        return false;
    }
    return update_texture_with_pixmaps(fGpu.get(),
                                       srcData,
                                       numLevels,
                                       backendTexture,
                                       textureOrigin,
                                       std::move(finishedCallback));
}
690
Adlai Holler64e13832020-10-13 08:21:56 -0400691//////////////////////////////////////////////////////////////////////////////
692
693static GrBackendTexture create_and_update_compressed_backend_texture(
694 GrDirectContext* dContext,
695 SkISize dimensions,
696 const GrBackendFormat& backendFormat,
697 GrMipmapped mipMapped,
698 GrProtected isProtected,
699 sk_sp<GrRefCntedCallback> finishedCallback,
700 const GrGpu::BackendTextureData* data) {
701 GrGpu* gpu = dContext->priv().getGpu();
702
703 GrBackendTexture beTex = gpu->createCompressedBackendTexture(dimensions, backendFormat,
704 mipMapped, isProtected);
705 if (!beTex.isValid()) {
706 return {};
707 }
708
709 if (!dContext->priv().getGpu()->updateCompressedBackendTexture(
710 beTex, std::move(finishedCallback), data)) {
711 dContext->deleteBackendTexture(beTex);
712 return {};
713 }
714 return beTex;
715}
716
// Creates a compressed backend texture filled with `color` (the backend
// compresses the solid color as needed).
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const SkColor4f& color,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}
736
// Convenience overload: maps the SkImage compression type to a backend format
// and forwards to the format-based color overload.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 SkImage::CompressionType compression,
                                                                 const SkColor4f& color,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    GrBackendFormat format = this->compressedBackendFormat(compression);
    return this->createCompressedBackendTexture(width, height, format, color,
                                                mipMapped, isProtected, finishedProc,
                                                finishedContext);
}
750
// Creates a compressed backend texture initialized from raw, pre-compressed
// data (`compressedData`/`dataSize` must match the format's expectations —
// not validated here).
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const void* compressedData,
                                                                 size_t dataSize,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(compressedData, dataSize);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}
771
// Convenience overload: maps the SkImage compression type to a backend format
// and forwards to the format-based data overload.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 SkImage::CompressionType compression,
                                                                 const void* data, size_t dataSize,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    GrBackendFormat format = this->compressedBackendFormat(compression);
    return this->createCompressedBackendTexture(width, height, format, data, dataSize, mipMapped,
                                                isProtected, finishedProc, finishedContext);
}
784
// Overwrites a compressed backend texture with a solid color.
bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
                                                     const SkColor4f& color,
                                                     GrGpuFinishedProc finishedProc,
                                                     GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrGpu::BackendTextureData data(color);
    return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
798
// Overwrites a compressed backend texture from raw, pre-compressed data.
// Fails (returns false) on an abandoned context or a null data pointer.
bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
                                                     const void* compressedData,
                                                     size_t dataSize,
                                                     GrGpuFinishedProc finishedProc,
                                                     GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    if (!compressedData) {
        return false;
    }

    GrGpu::BackendTextureData data(compressedData, dataSize);

    return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
818
Adlai Holler6d0745b2020-10-13 13:29:00 -0400819//////////////////////////////////////////////////////////////////////////////
820
821bool GrDirectContext::setBackendTextureState(const GrBackendTexture& backendTexture,
822 const GrBackendSurfaceMutableState& state,
823 GrBackendSurfaceMutableState* previousState,
824 GrGpuFinishedProc finishedProc,
825 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500826 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400827
828 if (this->abandoned()) {
829 return false;
830 }
831
832 return fGpu->setBackendTextureState(backendTexture, state, previousState, std::move(callback));
833}
834
835
836bool GrDirectContext::setBackendRenderTargetState(const GrBackendRenderTarget& backendRenderTarget,
837 const GrBackendSurfaceMutableState& state,
838 GrBackendSurfaceMutableState* previousState,
839 GrGpuFinishedProc finishedProc,
840 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500841 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400842
843 if (this->abandoned()) {
844 return false;
845 }
846
847 return fGpu->setBackendRenderTargetState(backendRenderTarget, state, previousState,
848 std::move(callback));
849}
850
851void GrDirectContext::deleteBackendTexture(GrBackendTexture backendTex) {
852 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
853 // For the Vulkan backend we still must destroy the backend texture when the context is
854 // abandoned.
855 if ((this->abandoned() && this->backend() != GrBackendApi::kVulkan) || !backendTex.isValid()) {
856 return;
857 }
858
859 fGpu->deleteBackendTexture(backendTex);
860}
861
862//////////////////////////////////////////////////////////////////////////////
863
864bool GrDirectContext::precompileShader(const SkData& key, const SkData& data) {
865 return fGpu->precompileShader(key, data);
866}
867
868#ifdef SK_ENABLE_DUMP_GPU
869#include "include/core/SkString.h"
870#include "src/utils/SkJSONWriter.h"
871SkString GrDirectContext::dump() const {
872 SkDynamicMemoryWStream stream;
873 SkJSONWriter writer(&stream, SkJSONWriter::Mode::kPretty);
874 writer.beginObject();
875
876 writer.appendString("backend", GrBackendApiToStr(this->backend()));
877
878 writer.appendName("caps");
879 this->caps()->dumpJSON(&writer);
880
881 writer.appendName("gpu");
882 this->fGpu->dumpJSON(&writer);
883
884 writer.appendName("context");
885 this->dumpJSON(&writer);
886
887 // Flush JSON to the memory stream
888 writer.endObject();
889 writer.flush();
890
891 // Null terminate the JSON data in the memory stream
892 stream.write8(0);
893
894 // Allocate a string big enough to hold all the data, then copy out of the stream
895 SkString result(stream.bytesWritten());
896 stream.copyToAndReset(result.writable_str());
897 return result;
898}
899#endif
900
John Rosascoa9b348f2019-11-08 13:18:15 -0800901#ifdef SK_GL
Robert Phillipsc7228c62020-07-14 12:57:39 -0400902
Robert Phillipsf4f80112020-07-13 16:13:31 -0400903/*************************************************************************************************/
904sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500905 GrContextOptions defaultOptions;
Jim Van Verth03b8ab22020-02-24 11:36:15 -0500906 return MakeGL(std::move(glInterface), defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -0500907}
908
Robert Phillipsf4f80112020-07-13 16:13:31 -0400909sk_sp<GrDirectContext> GrDirectContext::MakeGL(const GrContextOptions& options) {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400910 return MakeGL(nullptr, options);
911}
912
Robert Phillipsf4f80112020-07-13 16:13:31 -0400913sk_sp<GrDirectContext> GrDirectContext::MakeGL() {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400914 GrContextOptions defaultOptions;
915 return MakeGL(nullptr, defaultOptions);
916}
917
Brian Salomon24069eb2020-06-24 10:19:52 -0400918#if GR_TEST_UTILS
919GrGLFunction<GrGLGetErrorFn> make_get_error_with_random_oom(GrGLFunction<GrGLGetErrorFn> original) {
920 // A SkRandom and a GrGLFunction<GrGLGetErrorFn> are too big to be captured by a
921 // GrGLFunction<GrGLGetError> (surprise, surprise). So we make a context object and
922 // capture that by pointer. However, GrGLFunction doesn't support calling a destructor
923 // on the thing it captures. So we leak the context.
924 struct GetErrorContext {
925 SkRandom fRandom;
926 GrGLFunction<GrGLGetErrorFn> fGetError;
927 };
928
929 auto errorContext = new GetErrorContext;
930
931#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
932 __lsan_ignore_object(errorContext);
933#endif
934
935 errorContext->fGetError = original;
936
937 return GrGLFunction<GrGLGetErrorFn>([errorContext]() {
938 GrGLenum error = errorContext->fGetError();
939 if (error == GR_GL_NO_ERROR && (errorContext->fRandom.nextU() % 300) == 0) {
940 error = GR_GL_OUT_OF_MEMORY;
941 }
942 return error;
943 });
944}
945#endif
946
Robert Phillipsf4f80112020-07-13 16:13:31 -0400947sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface,
948 const GrContextOptions& options) {
949 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kOpenGL, options));
Brian Salomon24069eb2020-06-24 10:19:52 -0400950#if GR_TEST_UTILS
951 if (options.fRandomGLOOM) {
952 auto copy = sk_make_sp<GrGLInterface>(*glInterface);
953 copy->fFunctions.fGetError =
954 make_get_error_with_random_oom(glInterface->fFunctions.fGetError);
955#if GR_GL_CHECK_ERROR
956 // Suppress logging GL errors since we'll be synthetically generating them.
957 copy->suppressErrorLogging();
958#endif
959 glInterface = std::move(copy);
960 }
961#endif
Robert Phillipsf4f80112020-07-13 16:13:31 -0400962 direct->fGpu = GrGLGpu::Make(std::move(glInterface), options, direct.get());
963 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500964 return nullptr;
965 }
Robert Phillipsf4f80112020-07-13 16:13:31 -0400966 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -0500967}
John Rosascoa9b348f2019-11-08 13:18:15 -0800968#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -0500969
Robert Phillipsf4f80112020-07-13 16:13:31 -0400970/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -0400971sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions) {
972 GrContextOptions defaultOptions;
973 return MakeMock(mockOptions, defaultOptions);
974}
975
976sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions,
977 const GrContextOptions& options) {
978 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMock, options));
979
980 direct->fGpu = GrMockGpu::Make(mockOptions, options, direct.get());
981 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500982 return nullptr;
983 }
Chris Daltona378b452019-12-11 13:24:11 -0500984
Robert Phillipsf4f80112020-07-13 16:13:31 -0400985 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -0500986}
987
Greg Danielb4d89562018-10-03 18:44:49 +0000988#ifdef SK_VULKAN
Robert Phillipsf4f80112020-07-13 16:13:31 -0400989/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -0400990sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext) {
991 GrContextOptions defaultOptions;
992 return MakeVulkan(backendContext, defaultOptions);
993}
994
995sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext,
996 const GrContextOptions& options) {
997 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kVulkan, options));
998
999 direct->fGpu = GrVkGpu::Make(backendContext, options, direct.get());
1000 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001001 return nullptr;
1002 }
1003
Robert Phillipsf4f80112020-07-13 16:13:31 -04001004 return direct;
Greg Danielb4d89562018-10-03 18:44:49 +00001005}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001006#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001007
1008#ifdef SK_METAL
Robert Phillipsf4f80112020-07-13 16:13:31 -04001009/*************************************************************************************************/
Jim Van Verth351c9b52020-11-12 15:21:11 -05001010sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001011 GrContextOptions defaultOptions;
Jim Van Verth351c9b52020-11-12 15:21:11 -05001012 return MakeMetal(backendContext, defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -05001013}
1014
Jim Van Verth351c9b52020-11-12 15:21:11 -05001015sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext,
1016 const GrContextOptions& options) {
Robert Phillipsf4f80112020-07-13 16:13:31 -04001017 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
Robert Phillipsa3457b82018-03-08 11:30:12 -05001018
Jim Van Verth351c9b52020-11-12 15:21:11 -05001019 direct->fGpu = GrMtlTrampoline::MakeGpu(backendContext, options, direct.get());
Robert Phillipsf4f80112020-07-13 16:13:31 -04001020 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001021 return nullptr;
1022 }
Timothy Liang4e85e802018-06-28 16:37:18 -04001023
Robert Phillipsf4f80112020-07-13 16:13:31 -04001024 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001025}
Jim Van Verth351c9b52020-11-12 15:21:11 -05001026
1027// deprecated
1028sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue) {
1029 GrContextOptions defaultOptions;
1030 return MakeMetal(device, queue, defaultOptions);
1031}
1032
1033// deprecated
1034// remove include/gpu/mtl/GrMtlBackendContext.h, above, when removed
1035sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue,
1036 const GrContextOptions& options) {
1037 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
1038 GrMtlBackendContext backendContext = {};
1039 backendContext.fDevice.reset(device);
1040 backendContext.fQueue.reset(queue);
1041
1042 return GrDirectContext::MakeMetal(backendContext, options);
1043}
Robert Phillipsa3457b82018-03-08 11:30:12 -05001044#endif
1045
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001046#ifdef SK_DIRECT3D
Robert Phillipsf4f80112020-07-13 16:13:31 -04001047/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001048sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext) {
1049 GrContextOptions defaultOptions;
1050 return MakeDirect3D(backendContext, defaultOptions);
1051}
1052
1053sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext,
1054 const GrContextOptions& options) {
1055 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDirect3D, options));
1056
1057 direct->fGpu = GrD3DGpu::Make(backendContext, options, direct.get());
1058 if (!direct->init()) {
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -05001059 return nullptr;
1060 }
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001061
Robert Phillipsf4f80112020-07-13 16:13:31 -04001062 return direct;
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001063}
1064#endif
1065
Stephen White985741a2019-07-18 11:43:45 -04001066#ifdef SK_DAWN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001067/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001068sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device) {
Stephen White985741a2019-07-18 11:43:45 -04001069 GrContextOptions defaultOptions;
1070 return MakeDawn(device, defaultOptions);
1071}
1072
Robert Phillipsf4f80112020-07-13 16:13:31 -04001073sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device,
1074 const GrContextOptions& options) {
1075 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDawn, options));
Stephen White985741a2019-07-18 11:43:45 -04001076
Robert Phillipsf4f80112020-07-13 16:13:31 -04001077 direct->fGpu = GrDawnGpu::Make(device, options, direct.get());
1078 if (!direct->init()) {
Stephen White985741a2019-07-18 11:43:45 -04001079 return nullptr;
1080 }
1081
Robert Phillipsf4f80112020-07-13 16:13:31 -04001082 return direct;
Stephen White985741a2019-07-18 11:43:45 -04001083}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001084
Stephen White985741a2019-07-18 11:43:45 -04001085#endif