blob: dbb2b8d6b75fa626664ea8e88522ed8c3b3e98a3 [file] [log] [blame]
Robert Phillipsa3457b82018-03-08 11:30:12 -05001/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Greg Daniel54bfb182018-11-20 17:12:36 -05008
Robert Phillipsb7bfbc22020-07-01 12:55:01 -04009#include "include/gpu/GrDirectContext.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050010
Adlai Holler3acc69a2020-10-13 08:20:51 -040011#include "include/core/SkTraceMemoryDump.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050012#include "include/gpu/GrContextThreadSafeProxy.h"
Adlai Holler9555f292020-10-09 09:41:14 -040013#include "src/core/SkTaskGroup.h"
14#include "src/gpu/GrClientMappedBufferManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050015#include "src/gpu/GrContextThreadSafeProxyPriv.h"
Adlai Hollera0693042020-10-14 11:23:11 -040016#include "src/gpu/GrDirectContextPriv.h"
Adlai Holler4aa4c602020-10-12 13:58:52 -040017#include "src/gpu/GrDrawingManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/gpu/GrGpu.h"
Adlai Holler9555f292020-10-09 09:41:14 -040019#include "src/gpu/GrResourceProvider.h"
20#include "src/gpu/GrShaderUtils.h"
Adlai Holler3acc69a2020-10-13 08:20:51 -040021#include "src/image/SkImage_GpuBase.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050022
Adlai Holler4aa4c602020-10-12 13:58:52 -040023#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050024#include "src/gpu/effects/GrSkSLFP.h"
25#include "src/gpu/gl/GrGLGpu.h"
26#include "src/gpu/mock/GrMockGpu.h"
Robert Phillips5edf5102020-08-10 16:30:36 -040027#include "src/gpu/ops/GrSmallPathAtlasMgr.h"
Robert Phillipse19babf2020-04-06 13:57:30 -040028#include "src/gpu/text/GrAtlasManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050029#include "src/gpu/text/GrStrikeCache.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050030#ifdef SK_METAL
Jim Van Verth351c9b52020-11-12 15:21:11 -050031#include "include/gpu/mtl/GrMtlBackendContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050032#include "src/gpu/mtl/GrMtlTrampoline.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050033#endif
34#ifdef SK_VULKAN
Mike Kleinc0bd9f92019-04-23 12:05:21 -050035#include "src/gpu/vk/GrVkGpu.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050036#endif
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -050037#ifdef SK_DIRECT3D
38#include "src/gpu/d3d/GrD3DGpu.h"
39#endif
Stephen White985741a2019-07-18 11:43:45 -040040#ifdef SK_DAWN
Mike Klein52337de2019-07-25 09:00:52 -050041#include "src/gpu/dawn/GrDawnGpu.h"
Stephen White985741a2019-07-18 11:43:45 -040042#endif
Adlai Holler6d0745b2020-10-13 13:29:00 -040043#include <memory>
Robert Phillipsa3457b82018-03-08 11:30:12 -050044
Brian Salomon24069eb2020-06-24 10:19:52 -040045#if GR_TEST_UTILS
46# include "include/utils/SkRandom.h"
47# if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
48# include <sanitizer/lsan_interface.h>
49# endif
50#endif
51
Adlai Holler9555f292020-10-09 09:41:14 -040052#define ASSERT_SINGLE_OWNER GR_ASSERT_SINGLE_OWNER(this->singleOwner())
53
/**
 * Constructs a direct context for the given backend API. Only sets up the
 * thread-safe proxy here; the heavyweight initialization happens in init().
 */
GrDirectContext::GrDirectContext(GrBackendApi backend, const GrContextOptions& options)
        : INHERITED(GrContextThreadSafeProxyPriv::Make(backend, options)) {
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050057
/**
 * Destructor: flushes pending GPU work (if the context was fully created),
 * tears down the drawing manager and mapped-buffer manager, then releases all
 * cached GPU resources. The teardown order matters and must not be changed.
 */
GrDirectContext::~GrDirectContext() {
    ASSERT_SINGLE_OWNER
    // this if-test protects against the case where the context is being destroyed
    // before having been fully created
    if (fGpu) {
        this->flushAndSubmit();
    }

    this->destroyDrawingManager();
    // Drop the mapped-buffer manager before the resource cache goes away.
    fMappedBufferManager.reset();

    // Ideally we could just let the ptr drop, but resource cache queries this ptr in releaseAll.
    if (fResourceCache) {
        fResourceCache->releaseAll();
    }
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050074
Adlai Holler61a591c2020-10-12 12:38:33 -040075sk_sp<GrContextThreadSafeProxy> GrDirectContext::threadSafeProxy() {
76 return INHERITED::threadSafeProxy();
77}
78
Adlai Hollera7a40442020-10-09 09:49:42 -040079void GrDirectContext::resetGLTextureBindings() {
80 if (this->abandoned() || this->backend() != GrBackendApi::kOpenGL) {
81 return;
82 }
83 fGpu->resetTextureBindings();
84}
85
86void GrDirectContext::resetContext(uint32_t state) {
87 ASSERT_SINGLE_OWNER
88 fGpu->markContextDirty(state);
89}
90
/**
 * Abandons the context: the backend 3D API is assumed to be gone, so owned
 * objects are disconnected WITHOUT freeing API resources. Idempotent.
 * The abandon order below is deliberate; do not reorder.
 */
void GrDirectContext::abandonContext() {
    // Already abandoned — nothing to do.
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    fStrikeCache->freeAll();

    fMappedBufferManager->abandon();

    fResourceProvider->abandon();

    // abandon first so destructors don't try to free the resources in the API.
    fResourceCache->abandonAll();

    fGpu->disconnect(GrGpu::DisconnectType::kAbandon);

    fMappedBufferManager.reset();
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500115
Adlai Hollera7a40442020-10-09 09:49:42 -0400116bool GrDirectContext::abandoned() {
117 if (INHERITED::abandoned()) {
118 return true;
119 }
120
121 if (fGpu && fGpu->isDeviceLost()) {
122 this->abandonContext();
123 return true;
124 }
125 return false;
126}
127
Adlai Holler61a591c2020-10-12 12:38:33 -0400128bool GrDirectContext::oomed() { return fGpu ? fGpu->checkAndResetOOMed() : false; }
129
/**
 * Like abandonContext(), but the backend API is still alive, so backend
 * resources ARE released (kCleanup disconnect / releaseAll) before the
 * context is abandoned. Idempotent; ordering below is deliberate.
 */
void GrDirectContext::releaseResourcesAndAbandonContext() {
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    fMappedBufferManager.reset();

    fResourceProvider->abandon();

    // Release all resources in the backend 3D API.
    fResourceCache->releaseAll();

    fGpu->disconnect(GrGpu::DisconnectType::kCleanup);
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillips6db27c22019-05-01 10:43:56 -0400150
/**
 * Flushes pending work, then frees every GPU resource this context can drop:
 * atlases, glyph strikes, drawing-manager resources, and all unlocked cache
 * entries. No-op on an abandoned context.
 */
void GrDirectContext::freeGpuResources() {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    // Flush first so in-flight work doesn't pin resources we're about to free.
    this->flushAndSubmit();
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();

    // TODO: the glyph cache doesn't hold any GpuResources so this call should not be needed here.
    // Some slack in the GrTextBlob's implementation requires it though. That could be fixed.
    fStrikeCache->freeAll();

    this->drawingManager()->freeGpuResources();

    fResourceCache->purgeAllUnlocked();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500172
/**
 * Second-stage initialization: wires caps into the thread-safe proxy, runs the
 * base-class init, then builds the strike cache, resource cache/provider,
 * mapped-buffer manager, optional task group, shader-error handler, and the
 * glyph atlas manager. Returns false if the GPU or base init failed.
 */
bool GrDirectContext::init() {
    ASSERT_SINGLE_OWNER
    if (!fGpu) {
        return false;
    }

    // The proxy needs the caps before INHERITED::init() runs.
    fThreadSafeProxy->priv().init(fGpu->refCaps());
    if (!INHERITED::init()) {
        return false;
    }

    SkASSERT(this->getTextBlobCache());
    SkASSERT(this->threadSafeCache());

    fStrikeCache = std::make_unique<GrStrikeCache>();
    fResourceCache = std::make_unique<GrResourceCache>(this->singleOwner(), this->contextID());
    fResourceCache->setProxyProvider(this->proxyProvider());
    fResourceCache->setThreadSafeCache(this->threadSafeCache());
    fResourceProvider = std::make_unique<GrResourceProvider>(fGpu.get(), fResourceCache.get(),
                                                             this->singleOwner());
    fMappedBufferManager = std::make_unique<GrClientMappedBufferManager>(this->contextID());

    fDidTestPMConversions = false;

    // DDL TODO: we need to think through how the task group & persistent cache
    // get passed on to/shared between all the DDLRecorders created with this context.
    if (this->options().fExecutor) {
        fTaskGroup = std::make_unique<SkTaskGroup>(*this->options().fExecutor);
    }

    fPersistentCache = this->options().fPersistentCache;
    fShaderErrorHandler = this->options().fShaderErrorHandler;
    if (!fShaderErrorHandler) {
        // Fall back to the library-provided handler when the client didn't supply one.
        fShaderErrorHandler = GrShaderUtils::DefaultShaderErrorHandler();
    }

    GrDrawOpAtlas::AllowMultitexturing allowMultitexturing;
    if (GrContextOptions::Enable::kNo == this->options().fAllowMultipleGlyphCacheTextures ||
        // multitexturing supported only if range can represent the index + texcoords fully
        !(this->caps()->shaderCaps()->floatIs32Bits() ||
          this->caps()->shaderCaps()->integerSupport())) {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kNo;
    } else {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kYes;
    }

    GrProxyProvider* proxyProvider = this->priv().proxyProvider();

    fAtlasManager = std::make_unique<GrAtlasManager>(proxyProvider,
                                                     this->options().fGlyphCacheTextureMaximumBytes,
                                                     allowMultitexturing);
    // The atlas manager must be flushed alongside everything else.
    this->priv().addOnFlushCallbackObject(fAtlasManager.get());

    return true;
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500228
Adlai Holler3a508e92020-10-12 13:58:01 -0400229void GrDirectContext::getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const {
230 ASSERT_SINGLE_OWNER
231
232 if (resourceCount) {
233 *resourceCount = fResourceCache->getBudgetedResourceCount();
234 }
235 if (resourceBytes) {
236 *resourceBytes = fResourceCache->getBudgetedResourceBytes();
237 }
238}
239
240size_t GrDirectContext::getResourceCachePurgeableBytes() const {
241 ASSERT_SINGLE_OWNER
242 return fResourceCache->getPurgeableBytes();
243}
244
245void GrDirectContext::getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const {
246 ASSERT_SINGLE_OWNER
247 if (maxResources) {
248 *maxResources = -1;
249 }
250 if (maxResourceBytes) {
251 *maxResourceBytes = this->getResourceCacheLimit();
252 }
253}
254
255size_t GrDirectContext::getResourceCacheLimit() const {
256 ASSERT_SINGLE_OWNER
257 return fResourceCache->getMaxResourceBytes();
258}
259
260void GrDirectContext::setResourceCacheLimits(int unused, size_t maxResourceBytes) {
261 ASSERT_SINGLE_OWNER
262 this->setResourceCacheLimit(maxResourceBytes);
263}
264
265void GrDirectContext::setResourceCacheLimit(size_t maxResourceBytes) {
266 ASSERT_SINGLE_OWNER
267 fResourceCache->setLimit(maxResourceBytes);
268}
269
/**
 * Purges unlocked cached resources (optionally only scratch ones), then lets
 * the cache shrink back under budget. No-op on an abandoned context.
 */
void GrDirectContext::purgeUnlockedResources(bool scratchResourcesOnly) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(scratchResourcesOnly);
    fResourceCache->purgeAsNeeded();

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
284
/**
 * Periodic housekeeping: completes finished async work, recycles client-mapped
 * buffers, and purges resources (and CCPR cache entries) that have been idle
 * for at least msNotUsed. No-op on an abandoned context.
 */
void GrDirectContext::performDeferredCleanup(std::chrono::milliseconds msNotUsed) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    // Fire finished-procs and reclaim mapped buffers before computing the cutoff.
    this->checkAsyncWorkCompletion();
    fMappedBufferManager->process();
    auto purgeTime = GrStdSteadyClock::now() - msNotUsed;

    fResourceCache->purgeAsNeeded();
    fResourceCache->purgeResourcesNotUsedSince(purgeTime);

    // Coverage-counting path renderer keeps its own cache; age it out too.
    if (auto ccpr = this->drawingManager()->getCoverageCountingPathRenderer()) {
        ccpr->purgeCacheEntriesOlderThan(this->proxyProvider(), purgeTime);
    }

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
309
/**
 * Purges up to bytesToPurge of unlocked resources, preferring scratch
 * resources first when requested. No-op on an abandoned context.
 */
void GrDirectContext::purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(bytesToPurge, preferScratchResources);
}
319
Adlai Holler3acc69a2020-10-13 08:20:51 -0400320////////////////////////////////////////////////////////////////////////////////
321bool GrDirectContext::wait(int numSemaphores, const GrBackendSemaphore waitSemaphores[],
322 bool deleteSemaphoresAfterWait) {
323 if (!fGpu || fGpu->caps()->semaphoreSupport()) {
324 return false;
325 }
326 GrWrapOwnership ownership =
327 deleteSemaphoresAfterWait ? kAdopt_GrWrapOwnership : kBorrow_GrWrapOwnership;
328 for (int i = 0; i < numSemaphores; ++i) {
329 std::unique_ptr<GrSemaphore> sema = fResourceProvider->wrapBackendSemaphore(
330 waitSemaphores[i], GrResourceProvider::SemaphoreWrapType::kWillWait, ownership);
331 // If we failed to wrap the semaphore it means the client didn't give us a valid semaphore
332 // to begin with. Therefore, it is fine to not wait on it.
333 if (sema) {
334 fGpu->waitSemaphore(sema.get());
335 }
336 }
337 return true;
338}
Adlai Holler4aa4c602020-10-12 13:58:52 -0400339
Robert Phillips5edf5102020-08-10 16:30:36 -0400340GrSmallPathAtlasMgr* GrDirectContext::onGetSmallPathAtlasMgr() {
Robert Phillips079455c2020-08-11 15:18:46 -0400341 if (!fSmallPathAtlasMgr) {
342 fSmallPathAtlasMgr = std::make_unique<GrSmallPathAtlasMgr>();
343
344 this->priv().addOnFlushCallbackObject(fSmallPathAtlasMgr.get());
345 }
346
347 if (!fSmallPathAtlasMgr->initAtlas(this->proxyProvider(), this->caps())) {
348 return nullptr;
349 }
350
351 return fSmallPathAtlasMgr.get();
Robert Phillips5edf5102020-08-10 16:30:36 -0400352}
353
////////////////////////////////////////////////////////////////////////////////

/**
 * Flushes all pending work to the GPU. On an abandoned context the client's
 * finished/submitted callbacks are still invoked (submitted reports failure)
 * so their contexts aren't leaked, and kNo is returned.
 */
GrSemaphoresSubmitted GrDirectContext::flush(const GrFlushInfo& info) {
    ASSERT_SINGLE_OWNER
    if (this->abandoned()) {
        if (info.fFinishedProc) {
            info.fFinishedProc(info.fFinishedContext);
        }
        if (info.fSubmittedProc) {
            info.fSubmittedProc(info.fSubmittedContext, false);
        }
        return GrSemaphoresSubmitted::kNo;
    }

    // Flush every surface; no specific backend-surface access is required.
    return this->drawingManager()->flushSurfaces({}, SkSurface::BackendSurfaceAccess::kNoAccess,
                                                 info, nullptr);
}
371
372bool GrDirectContext::submit(bool syncCpu) {
373 ASSERT_SINGLE_OWNER
374 if (this->abandoned()) {
375 return false;
376 }
377
378 if (!fGpu) {
379 return false;
380 }
381
382 return fGpu->submitToGpu(syncCpu);
383}
384
385////////////////////////////////////////////////////////////////////////////////
386
387void GrDirectContext::checkAsyncWorkCompletion() {
388 if (fGpu) {
389 fGpu->checkFinishProcs();
390 }
391}
392
393////////////////////////////////////////////////////////////////////////////////
394
395void GrDirectContext::storeVkPipelineCacheData() {
396 if (fGpu) {
397 fGpu->storeVkPipelineCacheData();
398 }
399}
400
401////////////////////////////////////////////////////////////////////////////////
402
403bool GrDirectContext::supportsDistanceFieldText() const {
404 return this->caps()->shaderCaps()->supportsDistanceFieldText();
405}
406
407//////////////////////////////////////////////////////////////////////////////
408
/**
 * Dumps resource-cache memory statistics plus the text-blob cache footprint
 * into the client-provided SkTraceMemoryDump.
 */
void GrDirectContext::dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const {
    ASSERT_SINGLE_OWNER
    fResourceCache->dumpMemoryStatistics(traceMemoryDump);
    traceMemoryDump->dumpNumericValue("skia/gr_text_blob_cache", "size", "bytes",
                                      this->getTextBlobCache()->usedBytes());
}
415
416size_t GrDirectContext::ComputeImageSize(sk_sp<SkImage> image, GrMipmapped mipMapped,
417 bool useNextPow2) {
418 if (!image->isTextureBacked()) {
419 return 0;
420 }
421 SkImage_GpuBase* gpuImage = static_cast<SkImage_GpuBase*>(as_IB(image.get()));
422 GrTextureProxy* proxy = gpuImage->peekProxy();
423 if (!proxy) {
424 return 0;
425 }
426
427 int colorSamplesPerPixel = 1;
428 return GrSurface::ComputeSize(proxy->backendFormat(), image->dimensions(),
429 colorSamplesPerPixel, mipMapped, useNextPow2);
430}
431
Adlai Holler98dd0042020-10-13 10:04:00 -0400432GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
433 const GrBackendFormat& backendFormat,
434 GrMipmapped mipMapped,
435 GrRenderable renderable,
436 GrProtected isProtected) {
437 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
438 if (this->abandoned()) {
439 return GrBackendTexture();
440 }
441
442 return fGpu->createBackendTexture({width, height}, backendFormat, renderable,
443 mipMapped, isProtected);
444}
445
446GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
447 SkColorType skColorType,
448 GrMipmapped mipMapped,
449 GrRenderable renderable,
450 GrProtected isProtected) {
451 if (this->abandoned()) {
452 return GrBackendTexture();
453 }
454
455 const GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
456
457 return this->createBackendTexture(width, height, format, mipMapped, renderable, isProtected);
458}
459
460static GrBackendTexture create_and_update_backend_texture(
461 GrDirectContext* dContext,
462 SkISize dimensions,
463 const GrBackendFormat& backendFormat,
464 GrMipmapped mipMapped,
465 GrRenderable renderable,
466 GrProtected isProtected,
467 sk_sp<GrRefCntedCallback> finishedCallback,
468 const GrGpu::BackendTextureData* data) {
469 GrGpu* gpu = dContext->priv().getGpu();
470
471 GrBackendTexture beTex = gpu->createBackendTexture(dimensions, backendFormat, renderable,
472 mipMapped, isProtected);
473 if (!beTex.isValid()) {
474 return {};
475 }
476
477 if (!dContext->priv().getGpu()->updateBackendTexture(beTex,
478 std::move(finishedCallback),
479 data)) {
480 dContext->deleteBackendTexture(beTex);
481 return {};
482 }
483 return beTex;
484}
485
/**
 * Uploads the given mip levels into backendTexture. Levels that must be
 * flipped (bottom-left origin) or tightened (backend lacks rowBytes support)
 * are first converted into one shared temp buffer; others are uploaded
 * directly from the caller's pixmaps. Returns the backend's upload result.
 */
static bool update_texture_with_pixmaps(GrGpu* gpu,
                                        const SkPixmap* srcData,
                                        int numLevels,
                                        const GrBackendTexture& backendTexture,
                                        GrSurfaceOrigin textureOrigin,
                                        sk_sp<GrRefCntedCallback> finishedCallback) {
    bool flip = textureOrigin == kBottomLeft_GrSurfaceOrigin;
    bool mustBeTight = !gpu->caps()->writePixelsRowBytesSupport();

    // First pass: total the tight-row-bytes size of every level that needs
    // conversion, so one allocation can serve them all.
    size_t size = 0;
    for (int i = 0; i < numLevels; ++i) {
        size_t minRowBytes = srcData[i].info().minRowBytes();
        if (flip || (mustBeTight && srcData[i].rowBytes() != minRowBytes)) {
            size += minRowBytes * srcData[i].height();
        }
    }

    std::unique_ptr<char[]> tempStorage;
    if (size) {
        tempStorage.reset(new char[size]);
    }
    // Second pass: 'size' becomes a running offset into tempStorage.
    size = 0;
    SkAutoSTArray<15, GrPixmap> tempPixmaps(numLevels);
    for (int i = 0; i < numLevels; ++i) {
        size_t minRowBytes = srcData[i].info().minRowBytes();
        if (flip || (mustBeTight && srcData[i].rowBytes() != minRowBytes)) {
            // Convert (and flip if needed) into the shared buffer.
            tempPixmaps[i] = {srcData[i].info(), tempStorage.get() + size, minRowBytes};
            SkAssertResult(GrConvertPixels(tempPixmaps[i], srcData[i], flip));
            size += minRowBytes*srcData[i].height();
        } else {
            // Level is usable as-is; upload straight from the client's pixels.
            tempPixmaps[i] = srcData[i];
        }
    }

    GrGpu::BackendTextureData data(tempPixmaps.get());
    return gpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
523
/**
 * Creates a backend texture cleared to 'color' using an explicit backend
 * format. Returns an invalid handle on an abandoned context or failure.
 * NOTE(review): the finished callback is wrapped BEFORE the abandoned()
 * check — presumably so the client's finishedProc still fires via the
 * callback's teardown on the early-return path; confirm with GrRefCntedCallback.
 */
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_backend_texture(this, {width, height},
                                             backendFormat, mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}

/**
 * SkColorType flavor: derives the backend format, swizzles the clear color
 * into the format's channel order, then creates and initializes the texture.
 */
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
    if (!format.isValid()) {
        return {};
    }

    // Reorder the color channels to match how the format will be written.
    GrColorType grColorType = SkColorTypeToGrColorType(skColorType);
    SkColor4f swizzledColor = this->caps()->getWriteSwizzle(format, grColorType).applyTo(color);

    GrGpu::BackendTextureData data(swizzledColor);
    return create_and_update_backend_texture(this, {width, height}, format,
                                             mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
572
/**
 * Creates a backend texture initialized from a full mip chain of pixmaps.
 * Level 0 supplies the dimensions/color type; when more than one level is
 * provided the count must exactly match the full mip chain for those
 * dimensions. Returns an invalid handle on any validation or upload failure
 * (the partially created texture is deleted on upload failure).
 */
GrBackendTexture GrDirectContext::createBackendTexture(const SkPixmap srcData[],
                                                       int numProvidedLevels,
                                                       GrSurfaceOrigin textureOrigin,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    if (!srcData || numProvidedLevels <= 0) {
        return {};
    }

    int baseWidth = srcData[0].width();
    int baseHeight = srcData[0].height();
    SkColorType colorType = srcData[0].colorType();

    GrMipmapped mipMapped = GrMipmapped::kNo;
    int numExpectedLevels = 1;
    if (numProvidedLevels > 1) {
        // Partial mip chains are not accepted.
        numExpectedLevels = SkMipmap::ComputeLevelCount(baseWidth, baseHeight) + 1;
        mipMapped = GrMipmapped::kYes;
    }

    if (numProvidedLevels != numExpectedLevels) {
        return {};
    }

    GrBackendFormat backendFormat = this->defaultBackendFormat(colorType, renderable);
    GrBackendTexture beTex = this->createBackendTexture(srcData[0].width(),
                                                        srcData[0].height(),
                                                        backendFormat,
                                                        mipMapped,
                                                        renderable,
                                                        isProtected);
    if (!beTex.isValid()) {
        return {};
    }
    if (!update_texture_with_pixmaps(this->priv().getGpu(),
                                     srcData,
                                     numProvidedLevels,
                                     beTex,
                                     textureOrigin,
                                     std::move(finishedCallback))) {
        // Don't leak the texture when the pixel upload fails.
        this->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
628
/**
 * Overwrites an existing backend texture with a solid color (no swizzle —
 * the color is passed to the backend as-is). False on an abandoned context.
 */
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    // Wrapped before the abandoned() check; see note on createBackendTexture.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrGpu::BackendTextureData data(color);
    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}

/**
 * Color-type-aware variant: validates the color type against the texture's
 * format and applies the format's write swizzle to the color first.
 */
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           SkColorType skColorType,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrBackendFormat format = backendTexture.getBackendFormat();
    GrColorType grColorType = SkColorTypeAndFormatToGrColorType(this->caps(), skColorType, format);

    if (!this->caps()->areColorTypeAndFormatCompatible(grColorType, format)) {
        return false;
    }

    GrSwizzle swizzle = this->caps()->getWriteSwizzle(format, grColorType);
    GrGpu::BackendTextureData data(swizzle.applyTo(color));

    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
666
/**
 * Overwrites an existing backend texture from pixmap mip levels. The level
 * count must be 1 for a non-mipmapped texture, or the full chain implied by
 * the texture's dimensions when it has mipmaps. False on validation failure
 * or an abandoned context.
 */
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkPixmap srcData[],
                                           int numLevels,
                                           GrSurfaceOrigin textureOrigin,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    if (!srcData || numLevels <= 0) {
        return false;
    }

    int numExpectedLevels = 1;
    if (backendTexture.hasMipmaps()) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(backendTexture.width(),
                                                        backendTexture.height()) + 1;
    }
    if (numLevels != numExpectedLevels) {
        return false;
    }
    return update_texture_with_pixmaps(fGpu.get(),
                                       srcData,
                                       numLevels,
                                       backendTexture,
                                       textureOrigin,
                                       std::move(finishedCallback));
}
698
Adlai Holler64e13832020-10-13 08:21:56 -0400699//////////////////////////////////////////////////////////////////////////////
700
701static GrBackendTexture create_and_update_compressed_backend_texture(
702 GrDirectContext* dContext,
703 SkISize dimensions,
704 const GrBackendFormat& backendFormat,
705 GrMipmapped mipMapped,
706 GrProtected isProtected,
707 sk_sp<GrRefCntedCallback> finishedCallback,
708 const GrGpu::BackendTextureData* data) {
709 GrGpu* gpu = dContext->priv().getGpu();
710
711 GrBackendTexture beTex = gpu->createCompressedBackendTexture(dimensions, backendFormat,
712 mipMapped, isProtected);
713 if (!beTex.isValid()) {
714 return {};
715 }
716
717 if (!dContext->priv().getGpu()->updateCompressedBackendTexture(
718 beTex, std::move(finishedCallback), data)) {
719 dContext->deleteBackendTexture(beTex);
720 return {};
721 }
722 return beTex;
723}
724
/**
 * Creates a compressed backend texture filled with a solid color, using an
 * explicit backend format. Invalid handle on an abandoned context/failure.
 */
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const SkColor4f& color,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    // Wrapped before the abandoned() check; see note on createBackendTexture.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}

/**
 * Compression-type convenience: maps the SkImage compression type to a
 * backend format and delegates to the overload above.
 */
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 SkImage::CompressionType compression,
                                                                 const SkColor4f& color,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    GrBackendFormat format = this->compressedBackendFormat(compression);
    return this->createCompressedBackendTexture(width, height, format, color,
                                                mipMapped, isProtected, finishedProc,
                                                finishedContext);
}

/**
 * Creates a compressed backend texture initialized from raw compressed data,
 * using an explicit backend format.
 */
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const void* compressedData,
                                                                 size_t dataSize,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(compressedData, dataSize);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}

/**
 * Compression-type convenience for the raw-data variant above.
 */
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 SkImage::CompressionType compression,
                                                                 const void* data, size_t dataSize,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    GrBackendFormat format = this->compressedBackendFormat(compression);
    return this->createCompressedBackendTexture(width, height, format, data, dataSize, mipMapped,
                                                isProtected, finishedProc, finishedContext);
}
792
/**
 * Overwrites an existing compressed backend texture with a solid color.
 * False on an abandoned context or backend failure.
 */
bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
                                                     const SkColor4f& color,
                                                     GrGpuFinishedProc finishedProc,
                                                     GrGpuFinishedContext finishedContext) {
    // Wrapped before the abandoned() check; see note on createBackendTexture.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrGpu::BackendTextureData data(color);
    return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
}

/**
 * Overwrites an existing compressed backend texture with raw compressed data.
 * False on null data, an abandoned context, or backend failure.
 */
bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
                                                     const void* compressedData,
                                                     size_t dataSize,
                                                     GrGpuFinishedProc finishedProc,
                                                     GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    if (!compressedData) {
        return false;
    }

    GrGpu::BackendTextureData data(compressedData, dataSize);

    return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
826
Adlai Holler6d0745b2020-10-13 13:29:00 -0400827//////////////////////////////////////////////////////////////////////////////
828
829bool GrDirectContext::setBackendTextureState(const GrBackendTexture& backendTexture,
830 const GrBackendSurfaceMutableState& state,
831 GrBackendSurfaceMutableState* previousState,
832 GrGpuFinishedProc finishedProc,
833 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500834 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400835
836 if (this->abandoned()) {
837 return false;
838 }
839
840 return fGpu->setBackendTextureState(backendTexture, state, previousState, std::move(callback));
841}
842
843
844bool GrDirectContext::setBackendRenderTargetState(const GrBackendRenderTarget& backendRenderTarget,
845 const GrBackendSurfaceMutableState& state,
846 GrBackendSurfaceMutableState* previousState,
847 GrGpuFinishedProc finishedProc,
848 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500849 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400850
851 if (this->abandoned()) {
852 return false;
853 }
854
855 return fGpu->setBackendRenderTargetState(backendRenderTarget, state, previousState,
856 std::move(callback));
857}
858
859void GrDirectContext::deleteBackendTexture(GrBackendTexture backendTex) {
860 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
861 // For the Vulkan backend we still must destroy the backend texture when the context is
862 // abandoned.
863 if ((this->abandoned() && this->backend() != GrBackendApi::kVulkan) || !backendTex.isValid()) {
864 return;
865 }
866
867 fGpu->deleteBackendTexture(backendTex);
868}
869
870//////////////////////////////////////////////////////////////////////////////
871
872bool GrDirectContext::precompileShader(const SkData& key, const SkData& data) {
873 return fGpu->precompileShader(key, data);
874}
875
876#ifdef SK_ENABLE_DUMP_GPU
877#include "include/core/SkString.h"
878#include "src/utils/SkJSONWriter.h"
879SkString GrDirectContext::dump() const {
880 SkDynamicMemoryWStream stream;
881 SkJSONWriter writer(&stream, SkJSONWriter::Mode::kPretty);
882 writer.beginObject();
883
884 writer.appendString("backend", GrBackendApiToStr(this->backend()));
885
886 writer.appendName("caps");
887 this->caps()->dumpJSON(&writer);
888
889 writer.appendName("gpu");
890 this->fGpu->dumpJSON(&writer);
891
892 writer.appendName("context");
893 this->dumpJSON(&writer);
894
895 // Flush JSON to the memory stream
896 writer.endObject();
897 writer.flush();
898
899 // Null terminate the JSON data in the memory stream
900 stream.write8(0);
901
902 // Allocate a string big enough to hold all the data, then copy out of the stream
903 SkString result(stream.bytesWritten());
904 stream.copyToAndReset(result.writable_str());
905 return result;
906}
907#endif
908
John Rosascoa9b348f2019-11-08 13:18:15 -0800909#ifdef SK_GL
Robert Phillipsc7228c62020-07-14 12:57:39 -0400910
Robert Phillipsf4f80112020-07-13 16:13:31 -0400911/*************************************************************************************************/
912sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500913 GrContextOptions defaultOptions;
Jim Van Verth03b8ab22020-02-24 11:36:15 -0500914 return MakeGL(std::move(glInterface), defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -0500915}
916
Robert Phillipsf4f80112020-07-13 16:13:31 -0400917sk_sp<GrDirectContext> GrDirectContext::MakeGL(const GrContextOptions& options) {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400918 return MakeGL(nullptr, options);
919}
920
Robert Phillipsf4f80112020-07-13 16:13:31 -0400921sk_sp<GrDirectContext> GrDirectContext::MakeGL() {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400922 GrContextOptions defaultOptions;
923 return MakeGL(nullptr, defaultOptions);
924}
925
Brian Salomon24069eb2020-06-24 10:19:52 -0400926#if GR_TEST_UTILS
927GrGLFunction<GrGLGetErrorFn> make_get_error_with_random_oom(GrGLFunction<GrGLGetErrorFn> original) {
928 // A SkRandom and a GrGLFunction<GrGLGetErrorFn> are too big to be captured by a
929 // GrGLFunction<GrGLGetError> (surprise, surprise). So we make a context object and
930 // capture that by pointer. However, GrGLFunction doesn't support calling a destructor
931 // on the thing it captures. So we leak the context.
932 struct GetErrorContext {
933 SkRandom fRandom;
934 GrGLFunction<GrGLGetErrorFn> fGetError;
935 };
936
937 auto errorContext = new GetErrorContext;
938
939#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
940 __lsan_ignore_object(errorContext);
941#endif
942
943 errorContext->fGetError = original;
944
945 return GrGLFunction<GrGLGetErrorFn>([errorContext]() {
946 GrGLenum error = errorContext->fGetError();
947 if (error == GR_GL_NO_ERROR && (errorContext->fRandom.nextU() % 300) == 0) {
948 error = GR_GL_OUT_OF_MEMORY;
949 }
950 return error;
951 });
952}
953#endif
954
Robert Phillipsf4f80112020-07-13 16:13:31 -0400955sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface,
956 const GrContextOptions& options) {
957 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kOpenGL, options));
Brian Salomon24069eb2020-06-24 10:19:52 -0400958#if GR_TEST_UTILS
959 if (options.fRandomGLOOM) {
960 auto copy = sk_make_sp<GrGLInterface>(*glInterface);
961 copy->fFunctions.fGetError =
962 make_get_error_with_random_oom(glInterface->fFunctions.fGetError);
963#if GR_GL_CHECK_ERROR
964 // Suppress logging GL errors since we'll be synthetically generating them.
965 copy->suppressErrorLogging();
966#endif
967 glInterface = std::move(copy);
968 }
969#endif
Robert Phillipsf4f80112020-07-13 16:13:31 -0400970 direct->fGpu = GrGLGpu::Make(std::move(glInterface), options, direct.get());
971 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500972 return nullptr;
973 }
Robert Phillipsf4f80112020-07-13 16:13:31 -0400974 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -0500975}
John Rosascoa9b348f2019-11-08 13:18:15 -0800976#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -0500977
Robert Phillipsf4f80112020-07-13 16:13:31 -0400978/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -0400979sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions) {
980 GrContextOptions defaultOptions;
981 return MakeMock(mockOptions, defaultOptions);
982}
983
984sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions,
985 const GrContextOptions& options) {
986 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMock, options));
987
988 direct->fGpu = GrMockGpu::Make(mockOptions, options, direct.get());
989 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500990 return nullptr;
991 }
Chris Daltona378b452019-12-11 13:24:11 -0500992
Robert Phillipsf4f80112020-07-13 16:13:31 -0400993 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -0500994}
995
Greg Danielb4d89562018-10-03 18:44:49 +0000996#ifdef SK_VULKAN
Robert Phillipsf4f80112020-07-13 16:13:31 -0400997/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -0400998sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext) {
999 GrContextOptions defaultOptions;
1000 return MakeVulkan(backendContext, defaultOptions);
1001}
1002
1003sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext,
1004 const GrContextOptions& options) {
1005 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kVulkan, options));
1006
1007 direct->fGpu = GrVkGpu::Make(backendContext, options, direct.get());
1008 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001009 return nullptr;
1010 }
1011
Robert Phillipsf4f80112020-07-13 16:13:31 -04001012 return direct;
Greg Danielb4d89562018-10-03 18:44:49 +00001013}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001014#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001015
1016#ifdef SK_METAL
Robert Phillipsf4f80112020-07-13 16:13:31 -04001017/*************************************************************************************************/
Jim Van Verth351c9b52020-11-12 15:21:11 -05001018sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001019 GrContextOptions defaultOptions;
Jim Van Verth351c9b52020-11-12 15:21:11 -05001020 return MakeMetal(backendContext, defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -05001021}
1022
Jim Van Verth351c9b52020-11-12 15:21:11 -05001023sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext,
1024 const GrContextOptions& options) {
Robert Phillipsf4f80112020-07-13 16:13:31 -04001025 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
Robert Phillipsa3457b82018-03-08 11:30:12 -05001026
Jim Van Verth351c9b52020-11-12 15:21:11 -05001027 direct->fGpu = GrMtlTrampoline::MakeGpu(backendContext, options, direct.get());
Robert Phillipsf4f80112020-07-13 16:13:31 -04001028 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001029 return nullptr;
1030 }
Timothy Liang4e85e802018-06-28 16:37:18 -04001031
Robert Phillipsf4f80112020-07-13 16:13:31 -04001032 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001033}
Jim Van Verth351c9b52020-11-12 15:21:11 -05001034
1035// deprecated
1036sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue) {
1037 GrContextOptions defaultOptions;
1038 return MakeMetal(device, queue, defaultOptions);
1039}
1040
1041// deprecated
1042// remove include/gpu/mtl/GrMtlBackendContext.h, above, when removed
1043sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue,
1044 const GrContextOptions& options) {
1045 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
1046 GrMtlBackendContext backendContext = {};
1047 backendContext.fDevice.reset(device);
1048 backendContext.fQueue.reset(queue);
1049
1050 return GrDirectContext::MakeMetal(backendContext, options);
1051}
Robert Phillipsa3457b82018-03-08 11:30:12 -05001052#endif
1053
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001054#ifdef SK_DIRECT3D
Robert Phillipsf4f80112020-07-13 16:13:31 -04001055/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001056sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext) {
1057 GrContextOptions defaultOptions;
1058 return MakeDirect3D(backendContext, defaultOptions);
1059}
1060
1061sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext,
1062 const GrContextOptions& options) {
1063 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDirect3D, options));
1064
1065 direct->fGpu = GrD3DGpu::Make(backendContext, options, direct.get());
1066 if (!direct->init()) {
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -05001067 return nullptr;
1068 }
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001069
Robert Phillipsf4f80112020-07-13 16:13:31 -04001070 return direct;
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001071}
1072#endif
1073
Stephen White985741a2019-07-18 11:43:45 -04001074#ifdef SK_DAWN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001075/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001076sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device) {
Stephen White985741a2019-07-18 11:43:45 -04001077 GrContextOptions defaultOptions;
1078 return MakeDawn(device, defaultOptions);
1079}
1080
Robert Phillipsf4f80112020-07-13 16:13:31 -04001081sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device,
1082 const GrContextOptions& options) {
1083 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDawn, options));
Stephen White985741a2019-07-18 11:43:45 -04001084
Robert Phillipsf4f80112020-07-13 16:13:31 -04001085 direct->fGpu = GrDawnGpu::Make(device, options, direct.get());
1086 if (!direct->init()) {
Stephen White985741a2019-07-18 11:43:45 -04001087 return nullptr;
1088 }
1089
Robert Phillipsf4f80112020-07-13 16:13:31 -04001090 return direct;
Stephen White985741a2019-07-18 11:43:45 -04001091}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001092
Stephen White985741a2019-07-18 11:43:45 -04001093#endif