blob: 1324a466afd04baf0189ad2a42159d7fcb3fe105 [file] [log] [blame]
Robert Phillipsa3457b82018-03-08 11:30:12 -05001/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Greg Daniel54bfb182018-11-20 17:12:36 -05008
Robert Phillipsb7bfbc22020-07-01 12:55:01 -04009#include "include/gpu/GrDirectContext.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050010
Adlai Holler3acc69a2020-10-13 08:20:51 -040011#include "include/core/SkTraceMemoryDump.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050012#include "include/gpu/GrContextThreadSafeProxy.h"
Adlai Holler9555f292020-10-09 09:41:14 -040013#include "src/core/SkTaskGroup.h"
14#include "src/gpu/GrClientMappedBufferManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050015#include "src/gpu/GrContextThreadSafeProxyPriv.h"
Adlai Hollera0693042020-10-14 11:23:11 -040016#include "src/gpu/GrDirectContextPriv.h"
Adlai Holler4aa4c602020-10-12 13:58:52 -040017#include "src/gpu/GrDrawingManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/gpu/GrGpu.h"
Adlai Holler9555f292020-10-09 09:41:14 -040019#include "src/gpu/GrResourceProvider.h"
20#include "src/gpu/GrShaderUtils.h"
Adlai Holler3acc69a2020-10-13 08:20:51 -040021#include "src/image/SkImage_GpuBase.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050022
Adlai Holler4aa4c602020-10-12 13:58:52 -040023#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050024#include "src/gpu/effects/GrSkSLFP.h"
25#include "src/gpu/gl/GrGLGpu.h"
26#include "src/gpu/mock/GrMockGpu.h"
Robert Phillips5edf5102020-08-10 16:30:36 -040027#include "src/gpu/ops/GrSmallPathAtlasMgr.h"
Robert Phillipse19babf2020-04-06 13:57:30 -040028#include "src/gpu/text/GrAtlasManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050029#include "src/gpu/text/GrStrikeCache.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050030#ifdef SK_METAL
Jim Van Verth351c9b52020-11-12 15:21:11 -050031#include "include/gpu/mtl/GrMtlBackendContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050032#include "src/gpu/mtl/GrMtlTrampoline.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050033#endif
34#ifdef SK_VULKAN
Mike Kleinc0bd9f92019-04-23 12:05:21 -050035#include "src/gpu/vk/GrVkGpu.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050036#endif
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -050037#ifdef SK_DIRECT3D
38#include "src/gpu/d3d/GrD3DGpu.h"
39#endif
Stephen White985741a2019-07-18 11:43:45 -040040#ifdef SK_DAWN
Mike Klein52337de2019-07-25 09:00:52 -050041#include "src/gpu/dawn/GrDawnGpu.h"
Stephen White985741a2019-07-18 11:43:45 -040042#endif
Adlai Holler6d0745b2020-10-13 13:29:00 -040043#include <memory>
Robert Phillipsa3457b82018-03-08 11:30:12 -050044
Brian Salomon24069eb2020-06-24 10:19:52 -040045#if GR_TEST_UTILS
46# include "include/utils/SkRandom.h"
47# if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
48# include <sanitizer/lsan_interface.h>
49# endif
50#endif
51
Adlai Holler9555f292020-10-09 09:41:14 -040052#define ASSERT_SINGLE_OWNER GR_ASSERT_SINGLE_OWNER(this->singleOwner())
53
// Constructs a direct context for the given backend API. Heavyweight setup
// (GPU object, caches, providers) is deferred to init().
GrDirectContext::GrDirectContext(GrBackendApi backend, const GrContextOptions& options)
        : INHERITED(GrContextThreadSafeProxyPriv::Make(backend, options)) {
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050057
// Tears down the context: flushes pending work, then destroys the drawing
// manager, mapped-buffer manager, and releases all cached GPU resources.
GrDirectContext::~GrDirectContext() {
    ASSERT_SINGLE_OWNER
    // this if-test protects against the case where the context is being destroyed
    // before having been fully created
    if (fGpu) {
        this->flushAndSubmit();
    }

    this->destroyDrawingManager();
    fMappedBufferManager.reset();

    // Ideally we could just let the ptr drop, but resource cache queries this ptr in releaseAll.
    if (fResourceCache) {
        fResourceCache->releaseAll();
    }
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050074
Adlai Holler61a591c2020-10-12 12:38:33 -040075sk_sp<GrContextThreadSafeProxy> GrDirectContext::threadSafeProxy() {
76 return INHERITED::threadSafeProxy();
77}
78
Adlai Hollera7a40442020-10-09 09:49:42 -040079void GrDirectContext::resetGLTextureBindings() {
80 if (this->abandoned() || this->backend() != GrBackendApi::kOpenGL) {
81 return;
82 }
83 fGpu->resetTextureBindings();
84}
85
// Marks portions of backend state dirty (per the 'state' bitfield) so the GPU
// object re-sends them before the next use.
void GrDirectContext::resetContext(uint32_t state) {
    ASSERT_SINGLE_OWNER
    fGpu->markContextDirty(state);
}
90
// Abandons the context: no further GPU work is possible. Resources are NOT
// freed in the backend API (see releaseResourcesAndAbandonContext for that).
// The teardown order below matters — caches are abandoned before the GPU is
// disconnected.
void GrDirectContext::abandonContext() {
    // Idempotent: bail if already abandoned.
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    fStrikeCache->freeAll();

    fMappedBufferManager->abandon();

    fResourceProvider->abandon();

    // abandon first so destructors don't try to free the resources in the API.
    fResourceCache->abandonAll();

    fGpu->disconnect(GrGpu::DisconnectType::kAbandon);

    fMappedBufferManager.reset();
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500115
Adlai Hollera7a40442020-10-09 09:49:42 -0400116bool GrDirectContext::abandoned() {
117 if (INHERITED::abandoned()) {
118 return true;
119 }
120
121 if (fGpu && fGpu->isDeviceLost()) {
122 this->abandonContext();
123 return true;
124 }
125 return false;
126}
127
Adlai Holler61a591c2020-10-12 12:38:33 -0400128bool GrDirectContext::oomed() { return fGpu ? fGpu->checkAndResetOOMed() : false; }
129
// Like abandonContext(), but additionally releases all resources in the
// backend 3D API (DisconnectType::kCleanup instead of kAbandon).
void GrDirectContext::releaseResourcesAndAbandonContext() {
    // Idempotent: bail if already abandoned.
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    fMappedBufferManager.reset();

    fResourceProvider->abandon();

    // Release all resources in the backend 3D API.
    fResourceCache->releaseAll();

    fGpu->disconnect(GrGpu::DisconnectType::kCleanup);
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillips6db27c22019-05-01 10:43:56 -0400150
// Frees all GPU resources the context can release without abandoning it:
// flushes pending work, then empties atlases, caches, and the resource cache.
void GrDirectContext::freeGpuResources() {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->flushAndSubmit();
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();

    // TODO: the glyph cache doesn't hold any GpuResources so this call should not be needed here.
    // Some slack in the GrTextBlob's implementation requires it though. That could be fixed.
    fStrikeCache->freeAll();

    this->drawingManager()->freeGpuResources();

    fResourceCache->purgeAllUnlocked();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500172
// Second-stage construction. Wires up caps, caches, providers, and the atlas
// manager. Returns false (leaving the context unusable) if no GPU object was
// created or the base-class init fails. Initialization order matters: the
// resource cache must exist before the resource provider.
bool GrDirectContext::init() {
    ASSERT_SINGLE_OWNER
    if (!fGpu) {
        return false;
    }

    fThreadSafeProxy->priv().init(fGpu->refCaps());
    if (!INHERITED::init()) {
        return false;
    }

    SkASSERT(this->getTextBlobCache());
    SkASSERT(this->threadSafeCache());

    fStrikeCache = std::make_unique<GrStrikeCache>();
    fResourceCache = std::make_unique<GrResourceCache>(this->caps(), this->singleOwner(),
                                                       this->contextID());
    fResourceCache->setProxyProvider(this->proxyProvider());
    fResourceCache->setThreadSafeCache(this->threadSafeCache());
    fResourceProvider = std::make_unique<GrResourceProvider>(fGpu.get(), fResourceCache.get(),
                                                             this->singleOwner());
    fMappedBufferManager = std::make_unique<GrClientMappedBufferManager>(this->contextID());

    fDidTestPMConversions = false;

    // DDL TODO: we need to think through how the task group & persistent cache
    // get passed on to/shared between all the DDLRecorders created with this context.
    if (this->options().fExecutor) {
        fTaskGroup = std::make_unique<SkTaskGroup>(*this->options().fExecutor);
    }

    fPersistentCache = this->options().fPersistentCache;
    fShaderErrorHandler = this->options().fShaderErrorHandler;
    if (!fShaderErrorHandler) {
        fShaderErrorHandler = GrShaderUtils::DefaultShaderErrorHandler();
    }

    // Glyph-atlas multitexturing is allowed only when the options permit it AND
    // the shader caps can represent the texture index + texcoords fully.
    GrDrawOpAtlas::AllowMultitexturing allowMultitexturing;
    if (GrContextOptions::Enable::kNo == this->options().fAllowMultipleGlyphCacheTextures ||
        // multitexturing supported only if range can represent the index + texcoords fully
        !(this->caps()->shaderCaps()->floatIs32Bits() ||
          this->caps()->shaderCaps()->integerSupport())) {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kNo;
    } else {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kYes;
    }

    GrProxyProvider* proxyProvider = this->priv().proxyProvider();

    fAtlasManager = std::make_unique<GrAtlasManager>(proxyProvider,
                                                     this->options().fGlyphCacheTextureMaximumBytes,
                                                     allowMultitexturing);
    this->priv().addOnFlushCallbackObject(fAtlasManager.get());

    return true;
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500229
Adlai Holler3a508e92020-10-12 13:58:01 -0400230void GrDirectContext::getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const {
231 ASSERT_SINGLE_OWNER
232
233 if (resourceCount) {
234 *resourceCount = fResourceCache->getBudgetedResourceCount();
235 }
236 if (resourceBytes) {
237 *resourceBytes = fResourceCache->getBudgetedResourceBytes();
238 }
239}
240
// Returns the number of bytes held by resources that could be purged
// immediately (unlocked/unreferenced).
size_t GrDirectContext::getResourceCachePurgeableBytes() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getPurgeableBytes();
}
245
246void GrDirectContext::getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const {
247 ASSERT_SINGLE_OWNER
248 if (maxResources) {
249 *maxResources = -1;
250 }
251 if (maxResourceBytes) {
252 *maxResourceBytes = this->getResourceCacheLimit();
253 }
254}
255
// Returns the byte budget of the GPU resource cache.
size_t GrDirectContext::getResourceCacheLimit() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getMaxResourceBytes();
}
260
// Legacy two-argument setter. The resource-count limit ('unused') is ignored;
// only the byte budget is applied.
void GrDirectContext::setResourceCacheLimits(int unused, size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    this->setResourceCacheLimit(maxResourceBytes);
}
265
// Sets the byte budget of the GPU resource cache.
void GrDirectContext::setResourceCacheLimit(size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    fResourceCache->setLimit(maxResourceBytes);
}
270
// Purges unlocked resources from the cache (optionally only scratch
// resources), then trims the cache to budget and drops stale text blobs.
void GrDirectContext::purgeUnlockedResources(bool scratchResourcesOnly) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(scratchResourcesOnly);
    fResourceCache->purgeAsNeeded();

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
285
// Time-based cleanup: purges resources not used within the last 'msNotUsed'
// milliseconds, processes completed async work, and drops stale CCPR cache
// entries and text blobs.
void GrDirectContext::performDeferredCleanup(std::chrono::milliseconds msNotUsed) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->checkAsyncWorkCompletion();
    fMappedBufferManager->process();
    // Anything last used before 'purgeTime' is eligible for purging.
    auto purgeTime = GrStdSteadyClock::now() - msNotUsed;

    fResourceCache->purgeAsNeeded();
    fResourceCache->purgeResourcesNotUsedSince(purgeTime);

    if (auto ccpr = this->drawingManager()->getCoverageCountingPathRenderer()) {
        ccpr->purgeCacheEntriesOlderThan(this->proxyProvider(), purgeTime);
    }

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
310
311void GrDirectContext::purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources) {
312 ASSERT_SINGLE_OWNER
313
314 if (this->abandoned()) {
315 return;
316 }
317
318 fResourceCache->purgeUnlockedResources(bytesToPurge, preferScratchResources);
319}
320
Adlai Holler3acc69a2020-10-13 08:20:51 -0400321////////////////////////////////////////////////////////////////////////////////
322bool GrDirectContext::wait(int numSemaphores, const GrBackendSemaphore waitSemaphores[],
323 bool deleteSemaphoresAfterWait) {
324 if (!fGpu || fGpu->caps()->semaphoreSupport()) {
325 return false;
326 }
327 GrWrapOwnership ownership =
328 deleteSemaphoresAfterWait ? kAdopt_GrWrapOwnership : kBorrow_GrWrapOwnership;
329 for (int i = 0; i < numSemaphores; ++i) {
330 std::unique_ptr<GrSemaphore> sema = fResourceProvider->wrapBackendSemaphore(
331 waitSemaphores[i], GrResourceProvider::SemaphoreWrapType::kWillWait, ownership);
332 // If we failed to wrap the semaphore it means the client didn't give us a valid semaphore
333 // to begin with. Therefore, it is fine to not wait on it.
334 if (sema) {
335 fGpu->waitSemaphore(sema.get());
336 }
337 }
338 return true;
339}
Adlai Holler4aa4c602020-10-12 13:58:52 -0400340
// Lazily creates the small-path atlas manager and (re)initializes its atlas.
// Returns nullptr if the atlas cannot be initialized.
GrSmallPathAtlasMgr* GrDirectContext::onGetSmallPathAtlasMgr() {
    if (!fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr = std::make_unique<GrSmallPathAtlasMgr>();

        // Registered so the manager participates in pre-flush work.
        this->priv().addOnFlushCallbackObject(fSmallPathAtlasMgr.get());
    }

    if (!fSmallPathAtlasMgr->initAtlas(this->proxyProvider(), this->caps())) {
        return nullptr;
    }

    return fSmallPathAtlasMgr.get();
}
354
Adlai Holler3acc69a2020-10-13 08:20:51 -0400355////////////////////////////////////////////////////////////////////////////////
356
// Flushes all work to the GPU. On an abandoned context the finished/submitted
// callbacks are still invoked (submitted with success=false) so clients are
// never left waiting.
GrSemaphoresSubmitted GrDirectContext::flush(const GrFlushInfo& info) {
    ASSERT_SINGLE_OWNER
    if (this->abandoned()) {
        if (info.fFinishedProc) {
            info.fFinishedProc(info.fFinishedContext);
        }
        if (info.fSubmittedProc) {
            info.fSubmittedProc(info.fSubmittedContext, false);
        }
        return GrSemaphoresSubmitted::kNo;
    }

    return this->drawingManager()->flushSurfaces({}, SkSurface::BackendSurfaceAccess::kNoAccess,
                                                 info, nullptr);
}
372
373bool GrDirectContext::submit(bool syncCpu) {
374 ASSERT_SINGLE_OWNER
375 if (this->abandoned()) {
376 return false;
377 }
378
379 if (!fGpu) {
380 return false;
381 }
382
383 return fGpu->submitToGpu(syncCpu);
384}
385
386////////////////////////////////////////////////////////////////////////////////
387
388void GrDirectContext::checkAsyncWorkCompletion() {
389 if (fGpu) {
390 fGpu->checkFinishProcs();
391 }
392}
393
394////////////////////////////////////////////////////////////////////////////////
395
396void GrDirectContext::storeVkPipelineCacheData() {
397 if (fGpu) {
398 fGpu->storeVkPipelineCacheData();
399 }
400}
401
402////////////////////////////////////////////////////////////////////////////////
403
404bool GrDirectContext::supportsDistanceFieldText() const {
405 return this->caps()->shaderCaps()->supportsDistanceFieldText();
406}
407
408//////////////////////////////////////////////////////////////////////////////
409
// Dumps resource-cache and text-blob-cache memory statistics into the given
// SkTraceMemoryDump for tracing/diagnostics.
void GrDirectContext::dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const {
    ASSERT_SINGLE_OWNER
    fResourceCache->dumpMemoryStatistics(traceMemoryDump);
    traceMemoryDump->dumpNumericValue("skia/gr_text_blob_cache", "size", "bytes",
                                      this->getTextBlobCache()->usedBytes());
}
416
417size_t GrDirectContext::ComputeImageSize(sk_sp<SkImage> image, GrMipmapped mipMapped,
418 bool useNextPow2) {
419 if (!image->isTextureBacked()) {
420 return 0;
421 }
422 SkImage_GpuBase* gpuImage = static_cast<SkImage_GpuBase*>(as_IB(image.get()));
423 GrTextureProxy* proxy = gpuImage->peekProxy();
424 if (!proxy) {
425 return 0;
426 }
427
428 int colorSamplesPerPixel = 1;
429 return GrSurface::ComputeSize(proxy->backendFormat(), image->dimensions(),
430 colorSamplesPerPixel, mipMapped, useNextPow2);
431}
432
// Creates an uninitialized backend texture with the given explicit backend
// format. Returns an invalid GrBackendTexture on failure/abandonment.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    return fGpu->createBackendTexture({width, height}, backendFormat, renderable,
                                      mipMapped, isProtected);
}
446
// Creates an uninitialized backend texture, deriving the backend format from
// the SkColorType. Returns an invalid GrBackendTexture on failure.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    const GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);

    return this->createBackendTexture(width, height, format, mipMapped, renderable, isProtected);
}
460
// Helper: creates a backend texture and immediately uploads 'data' into it.
// On upload failure the just-created texture is deleted so nothing leaks.
// Returns an invalid texture on any failure.
static GrBackendTexture create_and_update_backend_texture(
        GrDirectContext* dContext,
        SkISize dimensions,
        const GrBackendFormat& backendFormat,
        GrMipmapped mipMapped,
        GrRenderable renderable,
        GrProtected isProtected,
        sk_sp<GrRefCntedCallback> finishedCallback,
        const GrGpu::BackendTextureData* data) {
    GrGpu* gpu = dContext->priv().getGpu();

    GrBackendTexture beTex = gpu->createBackendTexture(dimensions, backendFormat, renderable,
                                                       mipMapped, isProtected);
    if (!beTex.isValid()) {
        return {};
    }

    if (!dContext->priv().getGpu()->updateBackendTexture(beTex,
                                                         std::move(finishedCallback),
                                                         data)) {
        // Clean up the texture we just created so the caller doesn't leak it.
        dContext->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
486
// Helper: uploads mip-level pixmaps to an existing backend texture. For
// bottom-left-origin textures each level is first copied into temporary
// storage flipped vertically (GrConvertPixels with flip=true); otherwise the
// source pixmaps are used directly.
static bool update_texture_with_pixmaps(GrGpu* gpu,
                                        const SkPixmap* srcData,
                                        int numLevels,
                                        const GrBackendTexture& backendTexture,
                                        GrSurfaceOrigin textureOrigin,
                                        sk_sp<GrRefCntedCallback> finishedCallback) {
    std::unique_ptr<char[]> tempStorage;
    SkAutoSTArray<15, GrPixmap> tempPixmaps(numLevels);
    if (textureOrigin == kBottomLeft_GrSurfaceOrigin) {
        // First pass: total bytes needed for all levels at minimal row bytes.
        size_t size = 0;
        for (int i = 0; i < numLevels; ++i) {
            size += srcData[i].info().minRowBytes()*srcData[i].height();
        }
        tempStorage.reset(new char[size]);
        // Second pass: 'size' is reused as a running offset into tempStorage.
        size = 0;
        for (int i = 0; i < numLevels; ++i) {
            size_t tempRB = srcData[i].info().minRowBytes();
            tempPixmaps[i] = {srcData[i].info(), tempStorage.get() + size, tempRB};
            SkAssertResult(GrConvertPixels(tempPixmaps[i], srcData[i], /*flip*/ true));
            size += tempRB*srcData[i].height();
        }
    } else {
        for (int i = 0; i < numLevels; ++i) {
            tempPixmaps[i] = srcData[i];
        }
    }
    GrGpu::BackendTextureData data(tempPixmaps.get());
    return gpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
516
// Creates a backend texture (explicit format) initialized to a solid color.
// The finished callback is created before the abandonment check so it is
// always eventually invoked.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_backend_texture(this, {width, height},
                                             backendFormat, mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
537
// Creates a backend texture (format derived from SkColorType) initialized to
// a solid color. The color is run through the write swizzle so it lands in
// the texture with the expected channel order.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
    if (!format.isValid()) {
        return {};
    }

    GrColorType grColorType = SkColorTypeToGrColorType(skColorType);
    SkColor4f swizzledColor = this->caps()->getWriteSwizzle(format, grColorType).applyTo(color);

    GrGpu::BackendTextureData data(swizzledColor);
    return create_and_update_backend_texture(this, {width, height}, format,
                                             mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
565
// Creates a backend texture initialized from the provided mip-level pixmaps.
// The number of levels must be exactly 1 (no mips) or the full mip chain for
// the base dimensions; anything else fails. On upload failure the created
// texture is deleted before returning an invalid handle.
GrBackendTexture GrDirectContext::createBackendTexture(const SkPixmap srcData[],
                                                       int numProvidedLevels,
                                                       GrSurfaceOrigin textureOrigin,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    if (!srcData || numProvidedLevels <= 0) {
        return {};
    }

    // Dimensions and color type are taken from the base (level-0) pixmap.
    int baseWidth = srcData[0].width();
    int baseHeight = srcData[0].height();
    SkColorType colorType = srcData[0].colorType();

    GrMipmapped mipMapped = GrMipmapped::kNo;
    int numExpectedLevels = 1;
    if (numProvidedLevels > 1) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(baseWidth, baseHeight) + 1;
        mipMapped = GrMipmapped::kYes;
    }

    if (numProvidedLevels != numExpectedLevels) {
        return {};
    }

    GrBackendFormat backendFormat = this->defaultBackendFormat(colorType, renderable);
    GrBackendTexture beTex = this->createBackendTexture(srcData[0].width(),
                                                        srcData[0].height(),
                                                        backendFormat,
                                                        mipMapped,
                                                        renderable,
                                                        isProtected);
    if (!beTex.isValid()) {
        return {};
    }
    if (!update_texture_with_pixmaps(this->priv().getGpu(),
                                     srcData,
                                     numProvidedLevels,
                                     beTex,
                                     textureOrigin,
                                     std::move(finishedCallback))) {
        // Upload failed: don't leak the texture we just created.
        this->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
621
// Overwrites an existing backend texture with a solid color. Returns false on
// abandonment or upload failure.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrGpu::BackendTextureData data(color);
    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
635
// Overwrites an existing backend texture with a solid color, interpreting the
// texture through 'skColorType'. Fails if the color type and the texture's
// backend format are incompatible. The color is swizzled for write order.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           SkColorType skColorType,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrBackendFormat format = backendTexture.getBackendFormat();
    GrColorType grColorType = SkColorTypeAndFormatToGrColorType(this->caps(), skColorType, format);

    if (!this->caps()->areColorTypeAndFormatCompatible(grColorType, format)) {
        return false;
    }

    GrSwizzle swizzle = this->caps()->getWriteSwizzle(format, grColorType);
    GrGpu::BackendTextureData data(swizzle.applyTo(color));

    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
659
// Overwrites an existing backend texture from mip-level pixmaps. The level
// count must match the texture's mip state exactly (1 level for non-mipped,
// full chain for mipped).
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkPixmap srcData[],
                                           int numLevels,
                                           GrSurfaceOrigin textureOrigin,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    if (!srcData || numLevels <= 0) {
        return false;
    }

    int numExpectedLevels = 1;
    if (backendTexture.hasMipmaps()) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(backendTexture.width(),
                                                        backendTexture.height()) + 1;
    }
    if (numLevels != numExpectedLevels) {
        return false;
    }
    return update_texture_with_pixmaps(fGpu.get(),
                                       srcData,
                                       numLevels,
                                       backendTexture,
                                       textureOrigin,
                                       std::move(finishedCallback));
}
691
Adlai Holler64e13832020-10-13 08:21:56 -0400692//////////////////////////////////////////////////////////////////////////////
693
// Helper: creates a compressed backend texture and immediately uploads 'data'
// into it. On upload failure the texture is deleted so nothing leaks.
static GrBackendTexture create_and_update_compressed_backend_texture(
        GrDirectContext* dContext,
        SkISize dimensions,
        const GrBackendFormat& backendFormat,
        GrMipmapped mipMapped,
        GrProtected isProtected,
        sk_sp<GrRefCntedCallback> finishedCallback,
        const GrGpu::BackendTextureData* data) {
    GrGpu* gpu = dContext->priv().getGpu();

    GrBackendTexture beTex = gpu->createCompressedBackendTexture(dimensions, backendFormat,
                                                                 mipMapped, isProtected);
    if (!beTex.isValid()) {
        return {};
    }

    if (!dContext->priv().getGpu()->updateCompressedBackendTexture(
                beTex, std::move(finishedCallback), data)) {
        // Clean up the texture we just created so the caller doesn't leak it.
        dContext->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
717
// Creates a compressed backend texture (explicit format) filled with a solid
// color. Returns an invalid texture on abandonment or failure.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const SkColor4f& color,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}
737
738GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
739 SkImage::CompressionType compression,
740 const SkColor4f& color,
741 GrMipmapped mipMapped,
742 GrProtected isProtected,
743 GrGpuFinishedProc finishedProc,
744 GrGpuFinishedContext finishedContext) {
745 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
746 GrBackendFormat format = this->compressedBackendFormat(compression);
747 return this->createCompressedBackendTexture(width, height, format, color,
748 mipMapped, isProtected, finishedProc,
749 finishedContext);
750}
751
// Creates a compressed backend texture (explicit format) initialized from raw
// compressed data of 'dataSize' bytes.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const void* compressedData,
                                                                 size_t dataSize,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(compressedData, dataSize);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}
772
773GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
774 SkImage::CompressionType compression,
775 const void* data, size_t dataSize,
776 GrMipmapped mipMapped,
777 GrProtected isProtected,
778 GrGpuFinishedProc finishedProc,
779 GrGpuFinishedContext finishedContext) {
780 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
781 GrBackendFormat format = this->compressedBackendFormat(compression);
782 return this->createCompressedBackendTexture(width, height, format, data, dataSize, mipMapped,
783 isProtected, finishedProc, finishedContext);
784}
785
// Overwrites an existing compressed backend texture with a solid color.
bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
                                                     const SkColor4f& color,
                                                     GrGpuFinishedProc finishedProc,
                                                     GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrGpu::BackendTextureData data(color);
    return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
799
// Overwrites an existing compressed backend texture from raw compressed data.
// Fails on abandonment or a null data pointer.
bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
                                                     const void* compressedData,
                                                     size_t dataSize,
                                                     GrGpuFinishedProc finishedProc,
                                                     GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    if (!compressedData) {
        return false;
    }

    GrGpu::BackendTextureData data(compressedData, dataSize);

    return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
819
Adlai Holler6d0745b2020-10-13 13:29:00 -0400820//////////////////////////////////////////////////////////////////////////////
821
822bool GrDirectContext::setBackendTextureState(const GrBackendTexture& backendTexture,
823 const GrBackendSurfaceMutableState& state,
824 GrBackendSurfaceMutableState* previousState,
825 GrGpuFinishedProc finishedProc,
826 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500827 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400828
829 if (this->abandoned()) {
830 return false;
831 }
832
833 return fGpu->setBackendTextureState(backendTexture, state, previousState, std::move(callback));
834}
835
836
837bool GrDirectContext::setBackendRenderTargetState(const GrBackendRenderTarget& backendRenderTarget,
838 const GrBackendSurfaceMutableState& state,
839 GrBackendSurfaceMutableState* previousState,
840 GrGpuFinishedProc finishedProc,
841 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500842 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400843
844 if (this->abandoned()) {
845 return false;
846 }
847
848 return fGpu->setBackendRenderTargetState(backendRenderTarget, state, previousState,
849 std::move(callback));
850}
851
852void GrDirectContext::deleteBackendTexture(GrBackendTexture backendTex) {
853 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
854 // For the Vulkan backend we still must destroy the backend texture when the context is
855 // abandoned.
856 if ((this->abandoned() && this->backend() != GrBackendApi::kVulkan) || !backendTex.isValid()) {
857 return;
858 }
859
860 fGpu->deleteBackendTexture(backendTex);
861}
862
863//////////////////////////////////////////////////////////////////////////////
864
// Hands a previously captured shader cache key/data pair to the backend so the driver can
// compile it ahead of first use. Returns whatever the backend reports.
// NOTE(review): unlike the other fGpu-forwarding entry points in this file, there is no
// abandoned() guard here — confirm callers never invoke this on an abandoned context.
bool GrDirectContext::precompileShader(const SkData& key, const SkData& data) {
    return fGpu->precompileShader(key, data);
}
868
#ifdef SK_ENABLE_DUMP_GPU
#include "include/core/SkString.h"
#include "src/utils/SkJSONWriter.h"
// Serializes the context's state (backend name, caps, gpu, context) as a pretty-printed
// JSON object and returns it in an SkString.
SkString GrDirectContext::dump() const {
    SkDynamicMemoryWStream buffer;
    SkJSONWriter json(&buffer, SkJSONWriter::Mode::kPretty);

    json.beginObject();
    json.appendString("backend", GrBackendApiToStr(this->backend()));
    json.appendName("caps");
    this->caps()->dumpJSON(&json);
    json.appendName("gpu");
    this->fGpu->dumpJSON(&json);
    json.appendName("context");
    this->dumpJSON(&json);
    json.endObject();

    // Flush JSON to the memory stream, then NUL-terminate the buffered data.
    json.flush();
    buffer.write8(0);

    // Size the string to hold everything written, then move the stream contents into it.
    SkString result(buffer.bytesWritten());
    buffer.copyToAndReset(result.writable_str());
    return result;
}
#endif
901
John Rosascoa9b348f2019-11-08 13:18:15 -0800902#ifdef SK_GL
Robert Phillipsc7228c62020-07-14 12:57:39 -0400903
Robert Phillipsf4f80112020-07-13 16:13:31 -0400904/*************************************************************************************************/
905sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500906 GrContextOptions defaultOptions;
Jim Van Verth03b8ab22020-02-24 11:36:15 -0500907 return MakeGL(std::move(glInterface), defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -0500908}
909
Robert Phillipsf4f80112020-07-13 16:13:31 -0400910sk_sp<GrDirectContext> GrDirectContext::MakeGL(const GrContextOptions& options) {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400911 return MakeGL(nullptr, options);
912}
913
Robert Phillipsf4f80112020-07-13 16:13:31 -0400914sk_sp<GrDirectContext> GrDirectContext::MakeGL() {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400915 GrContextOptions defaultOptions;
916 return MakeGL(nullptr, defaultOptions);
917}
918
#if GR_TEST_UTILS
// Wraps an existing glGetError implementation so that roughly 1 in 300 calls that would
// have reported GR_GL_NO_ERROR reports GR_GL_OUT_OF_MEMORY instead. Used by test builds
// (options.fRandomGLOOM) to exercise OOM-handling paths.
// Marked static: this helper is only used inside this translation unit, so it should have
// internal linkage rather than exporting a symbol.
static GrGLFunction<GrGLGetErrorFn> make_get_error_with_random_oom(
        GrGLFunction<GrGLGetErrorFn> original) {
    // A SkRandom and a GrGLFunction<GrGLGetErrorFn> are too big to be captured by a
    // GrGLFunction<GrGLGetError> (surprise, surprise). So we make a context object and
    // capture that by pointer. However, GrGLFunction doesn't support calling a destructor
    // on the thing it captures. So we leak the context.
    struct GetErrorContext {
        SkRandom fRandom;
        GrGLFunction<GrGLGetErrorFn> fGetError;
    };

    auto errorContext = new GetErrorContext;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
    // The leak above is deliberate; keep LeakSanitizer quiet about it.
    __lsan_ignore_object(errorContext);
#endif

    errorContext->fGetError = original;

    return GrGLFunction<GrGLGetErrorFn>([errorContext]() {
        GrGLenum error = errorContext->fGetError();
        if (error == GR_GL_NO_ERROR && (errorContext->fRandom.nextU() % 300) == 0) {
            error = GR_GL_OUT_OF_MEMORY;
        }
        return error;
    });
}
#endif
947
// Primary GL factory: creates the context object first (the GrGLGpu is constructed with a
// backpointer to it), then builds the gpu and runs init(). Returns nullptr on failure.
sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface,
                                               const GrContextOptions& options) {
    sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kOpenGL, options));
#if GR_TEST_UTILS
    if (options.fRandomGLOOM) {
        // Clone the interface so we can splice in a getError that randomly reports OOM.
        // NOTE(review): *glInterface is dereferenced without a null check, yet the
        // nullptr-interface MakeGL overloads funnel into this function — confirm that
        // fRandomGLOOM is never combined with a null interface.
        auto copy = sk_make_sp<GrGLInterface>(*glInterface);
        copy->fFunctions.fGetError =
                make_get_error_with_random_oom(glInterface->fFunctions.fGetError);
#if GR_GL_CHECK_ERROR
        // Suppress logging GL errors since we'll be synthetically generating them.
        copy->suppressErrorLogging();
#endif
        glInterface = std::move(copy);
    }
#endif
    direct->fGpu = GrGLGpu::Make(std::move(glInterface), options, direct.get());
    if (!direct->init()) {
        return nullptr;
    }
    return direct;
}
John Rosascoa9b348f2019-11-08 13:18:15 -0800969#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -0500970
Robert Phillipsf4f80112020-07-13 16:13:31 -0400971/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -0400972sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions) {
973 GrContextOptions defaultOptions;
974 return MakeMock(mockOptions, defaultOptions);
975}
976
977sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions,
978 const GrContextOptions& options) {
979 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMock, options));
980
981 direct->fGpu = GrMockGpu::Make(mockOptions, options, direct.get());
982 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500983 return nullptr;
984 }
Chris Daltona378b452019-12-11 13:24:11 -0500985
Robert Phillipsf4f80112020-07-13 16:13:31 -0400986 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -0500987}
988
Greg Danielb4d89562018-10-03 18:44:49 +0000989#ifdef SK_VULKAN
Robert Phillipsf4f80112020-07-13 16:13:31 -0400990/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -0400991sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext) {
992 GrContextOptions defaultOptions;
993 return MakeVulkan(backendContext, defaultOptions);
994}
995
996sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext,
997 const GrContextOptions& options) {
998 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kVulkan, options));
999
1000 direct->fGpu = GrVkGpu::Make(backendContext, options, direct.get());
1001 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001002 return nullptr;
1003 }
1004
Robert Phillipsf4f80112020-07-13 16:13:31 -04001005 return direct;
Greg Danielb4d89562018-10-03 18:44:49 +00001006}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001007#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001008
1009#ifdef SK_METAL
Robert Phillipsf4f80112020-07-13 16:13:31 -04001010/*************************************************************************************************/
Jim Van Verth351c9b52020-11-12 15:21:11 -05001011sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001012 GrContextOptions defaultOptions;
Jim Van Verth351c9b52020-11-12 15:21:11 -05001013 return MakeMetal(backendContext, defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -05001014}
1015
Jim Van Verth351c9b52020-11-12 15:21:11 -05001016sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext,
1017 const GrContextOptions& options) {
Robert Phillipsf4f80112020-07-13 16:13:31 -04001018 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
Robert Phillipsa3457b82018-03-08 11:30:12 -05001019
Jim Van Verth351c9b52020-11-12 15:21:11 -05001020 direct->fGpu = GrMtlTrampoline::MakeGpu(backendContext, options, direct.get());
Robert Phillipsf4f80112020-07-13 16:13:31 -04001021 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001022 return nullptr;
1023 }
Timothy Liang4e85e802018-06-28 16:37:18 -04001024
Robert Phillipsf4f80112020-07-13 16:13:31 -04001025 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001026}
Jim Van Verth351c9b52020-11-12 15:21:11 -05001027
1028// deprecated
1029sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue) {
1030 GrContextOptions defaultOptions;
1031 return MakeMetal(device, queue, defaultOptions);
1032}
1033
1034// deprecated
1035// remove include/gpu/mtl/GrMtlBackendContext.h, above, when removed
1036sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue,
1037 const GrContextOptions& options) {
1038 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
1039 GrMtlBackendContext backendContext = {};
1040 backendContext.fDevice.reset(device);
1041 backendContext.fQueue.reset(queue);
1042
1043 return GrDirectContext::MakeMetal(backendContext, options);
1044}
Robert Phillipsa3457b82018-03-08 11:30:12 -05001045#endif
1046
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001047#ifdef SK_DIRECT3D
Robert Phillipsf4f80112020-07-13 16:13:31 -04001048/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001049sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext) {
1050 GrContextOptions defaultOptions;
1051 return MakeDirect3D(backendContext, defaultOptions);
1052}
1053
1054sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext,
1055 const GrContextOptions& options) {
1056 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDirect3D, options));
1057
1058 direct->fGpu = GrD3DGpu::Make(backendContext, options, direct.get());
1059 if (!direct->init()) {
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -05001060 return nullptr;
1061 }
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001062
Robert Phillipsf4f80112020-07-13 16:13:31 -04001063 return direct;
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001064}
1065#endif
1066
Stephen White985741a2019-07-18 11:43:45 -04001067#ifdef SK_DAWN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001068/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001069sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device) {
Stephen White985741a2019-07-18 11:43:45 -04001070 GrContextOptions defaultOptions;
1071 return MakeDawn(device, defaultOptions);
1072}
1073
Robert Phillipsf4f80112020-07-13 16:13:31 -04001074sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device,
1075 const GrContextOptions& options) {
1076 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDawn, options));
Stephen White985741a2019-07-18 11:43:45 -04001077
Robert Phillipsf4f80112020-07-13 16:13:31 -04001078 direct->fGpu = GrDawnGpu::Make(device, options, direct.get());
1079 if (!direct->init()) {
Stephen White985741a2019-07-18 11:43:45 -04001080 return nullptr;
1081 }
1082
Robert Phillipsf4f80112020-07-13 16:13:31 -04001083 return direct;
Stephen White985741a2019-07-18 11:43:45 -04001084}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001085
Stephen White985741a2019-07-18 11:43:45 -04001086#endif