blob: 3244ef6d69bf823ee06cd9d20a3fb9f10ae01b8f [file] [log] [blame]
Robert Phillipsa3457b82018-03-08 11:30:12 -05001/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Greg Daniel54bfb182018-11-20 17:12:36 -05008
Robert Phillipsb7bfbc22020-07-01 12:55:01 -04009#include "include/gpu/GrDirectContext.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050010
Adlai Holler3acc69a2020-10-13 08:20:51 -040011#include "include/core/SkTraceMemoryDump.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050012#include "include/gpu/GrContextThreadSafeProxy.h"
Adlai Holler9555f292020-10-09 09:41:14 -040013#include "src/core/SkTaskGroup.h"
14#include "src/gpu/GrClientMappedBufferManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050015#include "src/gpu/GrContextThreadSafeProxyPriv.h"
Adlai Hollera0693042020-10-14 11:23:11 -040016#include "src/gpu/GrDirectContextPriv.h"
Adlai Holler4aa4c602020-10-12 13:58:52 -040017#include "src/gpu/GrDrawingManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/gpu/GrGpu.h"
Adlai Holler9555f292020-10-09 09:41:14 -040019#include "src/gpu/GrResourceProvider.h"
20#include "src/gpu/GrShaderUtils.h"
Adlai Holler3acc69a2020-10-13 08:20:51 -040021#include "src/image/SkImage_GpuBase.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050022
Adlai Holler4aa4c602020-10-12 13:58:52 -040023#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050024#include "src/gpu/effects/GrSkSLFP.h"
25#include "src/gpu/gl/GrGLGpu.h"
26#include "src/gpu/mock/GrMockGpu.h"
Robert Phillips5edf5102020-08-10 16:30:36 -040027#include "src/gpu/ops/GrSmallPathAtlasMgr.h"
Robert Phillipse19babf2020-04-06 13:57:30 -040028#include "src/gpu/text/GrAtlasManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050029#include "src/gpu/text/GrStrikeCache.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050030#ifdef SK_METAL
Mike Kleinc0bd9f92019-04-23 12:05:21 -050031#include "src/gpu/mtl/GrMtlTrampoline.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050032#endif
33#ifdef SK_VULKAN
Mike Kleinc0bd9f92019-04-23 12:05:21 -050034#include "src/gpu/vk/GrVkGpu.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050035#endif
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -050036#ifdef SK_DIRECT3D
37#include "src/gpu/d3d/GrD3DGpu.h"
38#endif
Stephen White985741a2019-07-18 11:43:45 -040039#ifdef SK_DAWN
Mike Klein52337de2019-07-25 09:00:52 -050040#include "src/gpu/dawn/GrDawnGpu.h"
Stephen White985741a2019-07-18 11:43:45 -040041#endif
Adlai Holler6d0745b2020-10-13 13:29:00 -040042#include <memory>
Robert Phillipsa3457b82018-03-08 11:30:12 -050043
Brian Salomon24069eb2020-06-24 10:19:52 -040044#if GR_TEST_UTILS
45# include "include/utils/SkRandom.h"
46# if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
47# include <sanitizer/lsan_interface.h>
48# endif
49#endif
50
// Compile-time default for reduced ops-task splitting.
// NOTE(review): both branches are currently `false`, so the
// SK_DISABLE_REDUCE_OPLIST_SPLITTING define has no effect here. Presumably the
// #else branch is intended to flip to `true` when the feature is enabled by
// default — confirm before simplifying this to a single definition.
#ifdef SK_DISABLE_REDUCE_OPLIST_SPLITTING
static const bool kDefaultReduceOpsTaskSplitting = false;
#else
static const bool kDefaultReduceOpsTaskSplitting = false;
#endif
56
// Asserts (in debug builds) that the caller holds this context's single-owner
// lock; non-thread-safe entry points below open with this.
#define ASSERT_SINGLE_OWNER GR_ASSERT_SINGLE_OWNER(this->singleOwner())
58
// Constructs the context and its thread-safe proxy for the given backend.
// All real setup is deferred to init().
GrDirectContext::GrDirectContext(GrBackendApi backend, const GrContextOptions& options)
        : INHERITED(GrContextThreadSafeProxyPriv::Make(backend, options)) {
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050062
// Tears the context down: flush any pending work, then destroy subsystems in
// an order that keeps the resource cache valid while it is still queried.
GrDirectContext::~GrDirectContext() {
    ASSERT_SINGLE_OWNER
    // this if-test protects against the case where the context is being destroyed
    // before having been fully created
    if (fGpu) {
        this->flushAndSubmit();
    }

    this->destroyDrawingManager();
    fMappedBufferManager.reset();

    // Ideally we could just let the ptr drop, but resource cache queries this ptr in releaseAll.
    if (fResourceCache) {
        fResourceCache->releaseAll();
    }
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050079
Adlai Holler61a591c2020-10-12 12:38:33 -040080sk_sp<GrContextThreadSafeProxy> GrDirectContext::threadSafeProxy() {
81 return INHERITED::threadSafeProxy();
82}
83
Adlai Hollera7a40442020-10-09 09:49:42 -040084void GrDirectContext::resetGLTextureBindings() {
85 if (this->abandoned() || this->backend() != GrBackendApi::kOpenGL) {
86 return;
87 }
88 fGpu->resetTextureBindings();
89}
90
// Marks the given backend-state bits dirty so the GPU re-emits them on the
// next use (e.g. after external code touched the 3D API directly).
void GrDirectContext::resetContext(uint32_t state) {
    ASSERT_SINGLE_OWNER
    fGpu->markContextDirty(state);
}
95
// Severs the connection to the backend 3D API without cleaning up backend
// objects. Resources are abandoned (dropped, not destroyed via the API).
// Idempotent: a second call is a no-op.
void GrDirectContext::abandonContext() {
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    fStrikeCache->freeAll();

    fMappedBufferManager->abandon();

    fResourceProvider->abandon();

    // abandon first to so destructors
    // don't try to free the resources in the API.
    fResourceCache->abandonAll();

    fGpu->disconnect(GrGpu::DisconnectType::kAbandon);

    fMappedBufferManager.reset();
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500121
// Reports whether the context is unusable. Side effect: if the GrGpu reports
// a lost device, the context abandons itself here before returning true.
bool GrDirectContext::abandoned() {
    if (INHERITED::abandoned()) {
        return true;
    }

    if (fGpu && fGpu->isDeviceLost()) {
        this->abandonContext();
        return true;
    }
    return false;
}
133
Adlai Holler61a591c2020-10-12 12:38:33 -0400134bool GrDirectContext::oomed() { return fGpu ? fGpu->checkAndResetOOMed() : false; }
135
// Like abandonContext(), but first releases resources back to the backend 3D
// API (DisconnectType::kCleanup) so backend objects are properly destroyed.
void GrDirectContext::releaseResourcesAndAbandonContext() {
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    fMappedBufferManager.reset();

    fResourceProvider->abandon();

    // Release all resources in the backend 3D API.
    fResourceCache->releaseAll();

    fGpu->disconnect(GrGpu::DisconnectType::kCleanup);
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillips6db27c22019-05-01 10:43:56 -0400156
// Flushes pending work, then frees every GPU resource that can be released
// without abandoning the context (atlases, caches, unlocked cache entries).
void GrDirectContext::freeGpuResources() {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->flushAndSubmit();
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();

    // TODO: the glyph cache doesn't hold any GpuResources so this call should not be needed here.
    // Some slack in the GrTextBlob's implementation requires it though. That could be fixed.
    fStrikeCache->freeAll();

    this->drawingManager()->freeGpuResources();

    fResourceCache->purgeAllUnlocked();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500178
// One-time post-construction setup. Returns false when the context cannot be
// used (no GrGpu was created, or base-class init failed). Order matters:
// caps must reach the thread-safe proxy before INHERITED::init() runs, and
// the resource cache must exist before the provider that wraps it.
bool GrDirectContext::init() {
    ASSERT_SINGLE_OWNER
    if (!fGpu) {
        return false;
    }

    fThreadSafeProxy->priv().init(fGpu->refCaps());
    if (!INHERITED::init()) {
        return false;
    }

    SkASSERT(this->getTextBlobCache());
    SkASSERT(this->threadSafeCache());

    fStrikeCache = std::make_unique<GrStrikeCache>();
    fResourceCache = std::make_unique<GrResourceCache>(this->caps(), this->singleOwner(),
                                                       this->contextID());
    fResourceCache->setProxyProvider(this->proxyProvider());
    fResourceCache->setThreadSafeCache(this->threadSafeCache());
    fResourceProvider = std::make_unique<GrResourceProvider>(fGpu.get(), fResourceCache.get(),
                                                             this->singleOwner());
    fMappedBufferManager = std::make_unique<GrClientMappedBufferManager>(this->contextID());

    fDidTestPMConversions = false;

    // DDL TODO: we need to think through how the task group & persistent cache
    // get passed on to/shared between all the DDLRecorders created with this context.
    if (this->options().fExecutor) {
        fTaskGroup = std::make_unique<SkTaskGroup>(*this->options().fExecutor);
    }

    fPersistentCache = this->options().fPersistentCache;
    fShaderErrorHandler = this->options().fShaderErrorHandler;
    if (!fShaderErrorHandler) {
        fShaderErrorHandler = GrShaderUtils::DefaultShaderErrorHandler();
    }

    // Context options may force ops-task splitting reduction on or off;
    // otherwise use the compile-time default.
    bool reduceOpsTaskSplitting = kDefaultReduceOpsTaskSplitting;
    if (GrContextOptions::Enable::kNo == this->options().fReduceOpsTaskSplitting) {
        reduceOpsTaskSplitting = false;
    } else if (GrContextOptions::Enable::kYes == this->options().fReduceOpsTaskSplitting) {
        reduceOpsTaskSplitting = true;
    }

    this->setupDrawingManager(true, reduceOpsTaskSplitting);

    GrDrawOpAtlas::AllowMultitexturing allowMultitexturing;
    if (GrContextOptions::Enable::kNo == this->options().fAllowMultipleGlyphCacheTextures ||
        // multitexturing supported only if range can represent the index + texcoords fully
        !(this->caps()->shaderCaps()->floatIs32Bits() ||
          this->caps()->shaderCaps()->integerSupport())) {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kNo;
    } else {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kYes;
    }

    GrProxyProvider* proxyProvider = this->priv().proxyProvider();

    // The atlas manager is an on-flush callback so it can upload glyphs
    // before each flush executes.
    fAtlasManager = std::make_unique<GrAtlasManager>(proxyProvider,
                                                     this->options().fGlyphCacheTextureMaximumBytes,
                                                     allowMultitexturing);
    this->priv().addOnFlushCallbackObject(fAtlasManager.get());

    return true;
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500244
Adlai Holler3a508e92020-10-12 13:58:01 -0400245void GrDirectContext::getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const {
246 ASSERT_SINGLE_OWNER
247
248 if (resourceCount) {
249 *resourceCount = fResourceCache->getBudgetedResourceCount();
250 }
251 if (resourceBytes) {
252 *resourceBytes = fResourceCache->getBudgetedResourceBytes();
253 }
254}
255
// Returns the number of bytes held by cache entries that could be purged.
size_t GrDirectContext::getResourceCachePurgeableBytes() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getPurgeableBytes();
}
260
261void GrDirectContext::getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const {
262 ASSERT_SINGLE_OWNER
263 if (maxResources) {
264 *maxResources = -1;
265 }
266 if (maxResourceBytes) {
267 *maxResourceBytes = this->getResourceCacheLimit();
268 }
269}
270
// Returns the resource cache's byte budget.
size_t GrDirectContext::getResourceCacheLimit() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getMaxResourceBytes();
}
275
// Legacy setter kept for API compatibility; the resource-count limit
// (`unused`) is ignored and only the byte limit is applied.
void GrDirectContext::setResourceCacheLimits(int unused, size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    this->setResourceCacheLimit(maxResourceBytes);
}
280
// Sets the resource cache's byte budget.
void GrDirectContext::setResourceCacheLimit(size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    fResourceCache->setLimit(maxResourceBytes);
}
285
// Purges unlocked cache resources — all of them, or only scratch (reusable)
// ones — then re-enforces the budget.
void GrDirectContext::purgeUnlockedResources(bool scratchResourcesOnly) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(scratchResourcesOnly);
    fResourceCache->purgeAsNeeded();

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
300
// Time-based cleanup: drops resources (and CCPR cache entries) that have not
// been used within `msNotUsed`, after draining finished async work.
void GrDirectContext::performDeferredCleanup(std::chrono::milliseconds msNotUsed) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->checkAsyncWorkCompletion();
    fMappedBufferManager->process();
    auto purgeTime = GrStdSteadyClock::now() - msNotUsed;

    fResourceCache->purgeAsNeeded();
    fResourceCache->purgeResourcesNotUsedSince(purgeTime);

    if (auto ccpr = this->drawingManager()->getCoverageCountingPathRenderer()) {
        ccpr->purgeCacheEntriesOlderThan(this->proxyProvider(), purgeTime);
    }

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
325
// Purges roughly `bytesToPurge` of unlocked resources, preferring scratch
// resources first when requested.
void GrDirectContext::purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(bytesToPurge, preferScratchResources);
}
335
Adlai Holler3acc69a2020-10-13 08:20:51 -0400336////////////////////////////////////////////////////////////////////////////////
337bool GrDirectContext::wait(int numSemaphores, const GrBackendSemaphore waitSemaphores[],
338 bool deleteSemaphoresAfterWait) {
339 if (!fGpu || fGpu->caps()->semaphoreSupport()) {
340 return false;
341 }
342 GrWrapOwnership ownership =
343 deleteSemaphoresAfterWait ? kAdopt_GrWrapOwnership : kBorrow_GrWrapOwnership;
344 for (int i = 0; i < numSemaphores; ++i) {
345 std::unique_ptr<GrSemaphore> sema = fResourceProvider->wrapBackendSemaphore(
346 waitSemaphores[i], GrResourceProvider::SemaphoreWrapType::kWillWait, ownership);
347 // If we failed to wrap the semaphore it means the client didn't give us a valid semaphore
348 // to begin with. Therefore, it is fine to not wait on it.
349 if (sema) {
350 fGpu->waitSemaphore(sema.get());
351 }
352 }
353 return true;
354}
Adlai Holler4aa4c602020-10-12 13:58:52 -0400355
// Lazily creates the small-path atlas manager (registering it as an on-flush
// callback) and (re)initializes its atlas. Returns nullptr on atlas-init
// failure.
GrSmallPathAtlasMgr* GrDirectContext::onGetSmallPathAtlasMgr() {
    if (!fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr = std::make_unique<GrSmallPathAtlasMgr>();

        this->priv().addOnFlushCallbackObject(fSmallPathAtlasMgr.get());
    }

    if (!fSmallPathAtlasMgr->initAtlas(this->proxyProvider(), this->caps())) {
        return nullptr;
    }

    return fSmallPathAtlasMgr.get();
}
369
Adlai Holler3acc69a2020-10-13 08:20:51 -0400370////////////////////////////////////////////////////////////////////////////////
371
////////////////////////////////////////////////////////////////////////////////
// Flushes recorded work to the GPU command stream. On an abandoned context the
// client's finished/submitted callbacks are still invoked (submitted=false) so
// callers can release per-flush state. Returns kYes only when the flush
// happened AND any requested signal semaphores could actually be emitted.
GrSemaphoresSubmitted GrDirectContext::flush(const GrFlushInfo& info) {
    ASSERT_SINGLE_OWNER
    if (this->abandoned()) {
        if (info.fFinishedProc) {
            info.fFinishedProc(info.fFinishedContext);
        }
        if (info.fSubmittedProc) {
            info.fSubmittedProc(info.fSubmittedContext, false);
        }
        return GrSemaphoresSubmitted::kNo;
    }

    bool flushed = this->drawingManager()->flush(
            nullptr, 0, SkSurface::BackendSurfaceAccess::kNoAccess, info, nullptr);

    // Semaphores requested but unsupported (or nothing flushed) => kNo.
    if (!flushed || (!this->priv().caps()->semaphoreSupport() && info.fNumSemaphores)) {
        return GrSemaphoresSubmitted::kNo;
    }
    return GrSemaphoresSubmitted::kYes;
}
392
393bool GrDirectContext::submit(bool syncCpu) {
394 ASSERT_SINGLE_OWNER
395 if (this->abandoned()) {
396 return false;
397 }
398
399 if (!fGpu) {
400 return false;
401 }
402
403 return fGpu->submitToGpu(syncCpu);
404}
405
406////////////////////////////////////////////////////////////////////////////////
407
408void GrDirectContext::checkAsyncWorkCompletion() {
409 if (fGpu) {
410 fGpu->checkFinishProcs();
411 }
412}
413
414////////////////////////////////////////////////////////////////////////////////
415
416void GrDirectContext::storeVkPipelineCacheData() {
417 if (fGpu) {
418 fGpu->storeVkPipelineCacheData();
419 }
420}
421
422////////////////////////////////////////////////////////////////////////////////
423
424bool GrDirectContext::supportsDistanceFieldText() const {
425 return this->caps()->shaderCaps()->supportsDistanceFieldText();
426}
427
428//////////////////////////////////////////////////////////////////////////////
429
//////////////////////////////////////////////////////////////////////////////

// Writes resource-cache and text-blob-cache memory usage into the tracing dump.
void GrDirectContext::dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const {
    ASSERT_SINGLE_OWNER
    fResourceCache->dumpMemoryStatistics(traceMemoryDump);
    traceMemoryDump->dumpNumericValue("skia/gr_text_blob_cache", "size", "bytes",
                                      this->getTextBlobCache()->usedBytes());
}
436
// Estimates the GPU memory footprint of a texture-backed SkImage (0 for
// non-texture images or images whose proxy is gone). `useNextPow2` rounds
// dimensions up for the estimate; mip levels are included per `mipMapped`.
size_t GrDirectContext::ComputeImageSize(sk_sp<SkImage> image, GrMipmapped mipMapped,
                                         bool useNextPow2) {
    if (!image->isTextureBacked()) {
        return 0;
    }
    SkImage_GpuBase* gpuImage = static_cast<SkImage_GpuBase*>(as_IB(image.get()));
    GrTextureProxy* proxy = gpuImage->peekProxy();
    if (!proxy) {
        return 0;
    }

    int colorSamplesPerPixel = 1;
    return GrSurface::ComputeSize(proxy->backendFormat(), image->dimensions(),
                                  colorSamplesPerPixel, mipMapped, useNextPow2);
}
452
// Creates an uninitialized backend texture with the given format. Returns an
// invalid GrBackendTexture if the context is abandoned or creation fails.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    return fGpu->createBackendTexture({width, height}, backendFormat, renderable,
                                      mipMapped, isProtected);
}
466
467GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
468 SkColorType skColorType,
469 GrMipmapped mipMapped,
470 GrRenderable renderable,
471 GrProtected isProtected) {
472 if (this->abandoned()) {
473 return GrBackendTexture();
474 }
475
476 const GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
477
478 return this->createBackendTexture(width, height, format, mipMapped, renderable, isProtected);
479}
480
// Helper: creates a backend texture then uploads `data` into it. On upload
// failure the freshly-created texture is deleted so nothing leaks; an invalid
// texture is returned on any failure.
static GrBackendTexture create_and_update_backend_texture(
        GrDirectContext* dContext,
        SkISize dimensions,
        const GrBackendFormat& backendFormat,
        GrMipmapped mipMapped,
        GrRenderable renderable,
        GrProtected isProtected,
        sk_sp<GrRefCntedCallback> finishedCallback,
        const GrGpu::BackendTextureData* data) {
    GrGpu* gpu = dContext->priv().getGpu();

    GrBackendTexture beTex = gpu->createBackendTexture(dimensions, backendFormat, renderable,
                                                       mipMapped, isProtected);
    if (!beTex.isValid()) {
        return {};
    }

    if (!dContext->priv().getGpu()->updateBackendTexture(beTex,
                                                         std::move(finishedCallback),
                                                         data)) {
        dContext->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
506
// Creates a backend texture initialized to a solid color. The finished-proc
// is wrapped first so it fires even on the abandoned-context early-out path.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    sk_sp<GrRefCntedCallback> finishedCallback;
    if (finishedProc) {
        finishedCallback.reset(new GrRefCntedCallback(finishedProc, finishedContext));
    }

    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_backend_texture(this, {width, height},
                                             backendFormat, mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
530
// Color-type flavor of the solid-color factory: resolves the default backend
// format for the color type and swizzles the color to match the format's
// channel order before uploading.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    sk_sp<GrRefCntedCallback> finishedCallback;
    if (finishedProc) {
        finishedCallback.reset(new GrRefCntedCallback(finishedProc, finishedContext));
    }

    if (this->abandoned()) {
        return {};
    }

    GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
    if (!format.isValid()) {
        return {};
    }

    GrColorType grColorType = SkColorTypeToGrColorType(skColorType);
    SkColor4f swizzledColor = this->caps()->getWriteSwizzle(format, grColorType).applyTo(color);

    GrGpu::BackendTextureData data(swizzledColor);
    return create_and_update_backend_texture(this, {width, height}, format,
                                             mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
561
// Creates a backend texture initialized from pixmap levels. Level 0 fixes the
// dimensions/color type; providing more than one level implies a full mip
// chain whose count must match exactly, otherwise creation fails.
GrBackendTexture GrDirectContext::createBackendTexture(const SkPixmap srcData[],
                                                       int numProvidedLevels,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    sk_sp<GrRefCntedCallback> finishedCallback;
    if (finishedProc) {
        finishedCallback.reset(new GrRefCntedCallback(finishedProc, finishedContext));
    }

    if (this->abandoned()) {
        return {};
    }

    if (!srcData || numProvidedLevels <= 0) {
        return {};
    }

    int baseWidth = srcData[0].width();
    int baseHeight = srcData[0].height();
    SkColorType colorType = srcData[0].colorType();

    GrMipmapped mipMapped = GrMipmapped::kNo;
    int numExpectedLevels = 1;
    if (numProvidedLevels > 1) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(baseWidth, baseHeight) + 1;
        mipMapped = GrMipmapped::kYes;
    }

    if (numProvidedLevels != numExpectedLevels) {
        return {};
    }

    GrBackendFormat backendFormat = this->defaultBackendFormat(colorType, renderable);

    GrGpu::BackendTextureData data(srcData);
    return create_and_update_backend_texture(this, {baseWidth, baseHeight},
                                             backendFormat, mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
605
// Overwrites an existing backend texture with a solid color. NOTE(review):
// unlike the SkColorType overload below, no swizzle is applied here —
// presumably the caller supplies color already in the texture's channel
// order; confirm against the public API contract.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    sk_sp<GrRefCntedCallback> finishedCallback;
    if (finishedProc) {
        finishedCallback.reset(new GrRefCntedCallback(finishedProc, finishedContext));
    }

    if (this->abandoned()) {
        return false;
    }

    GrGpu::BackendTextureData data(color);
    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
622
// Overwrites an existing backend texture with a solid color, interpreting the
// texture through the given SkColorType. Verifies color-type/format
// compatibility and swizzles the color to the format's channel order.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           SkColorType skColorType,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    sk_sp<GrRefCntedCallback> finishedCallback;
    if (finishedProc) {
        finishedCallback.reset(new GrRefCntedCallback(finishedProc, finishedContext));
    }

    if (this->abandoned()) {
        return false;
    }

    GrBackendFormat format = backendTexture.getBackendFormat();
    GrColorType grColorType = SkColorTypeAndFormatToGrColorType(this->caps(), skColorType, format);

    if (!this->caps()->areColorTypeAndFormatCompatible(grColorType, format)) {
        return false;
    }

    GrSwizzle swizzle = this->caps()->getWriteSwizzle(format, grColorType);
    GrGpu::BackendTextureData data(swizzle.applyTo(color));

    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
649
// Overwrites an existing backend texture from pixmap levels. The number of
// provided levels must be exactly 1, or the full mip count implied by the
// texture's dimensions when it has mipmaps.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkPixmap srcData[],
                                           int numLevels,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    sk_sp<GrRefCntedCallback> finishedCallback;
    if (finishedProc) {
        finishedCallback.reset(new GrRefCntedCallback(finishedProc, finishedContext));
    }

    if (this->abandoned()) {
        return false;
    }

    if (!srcData || numLevels <= 0) {
        return false;
    }

    int numExpectedLevels = 1;
    if (backendTexture.hasMipmaps()) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(backendTexture.width(),
                                                        backendTexture.height()) + 1;
    }
    if (numLevels != numExpectedLevels) {
        return false;
    }

    GrGpu::BackendTextureData data(srcData);
    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
680
Adlai Holler64e13832020-10-13 08:21:56 -0400681//////////////////////////////////////////////////////////////////////////////
682
// Helper mirroring create_and_update_backend_texture() for compressed
// formats: create, upload, and delete-on-upload-failure so nothing leaks.
static GrBackendTexture create_and_update_compressed_backend_texture(
        GrDirectContext* dContext,
        SkISize dimensions,
        const GrBackendFormat& backendFormat,
        GrMipmapped mipMapped,
        GrProtected isProtected,
        sk_sp<GrRefCntedCallback> finishedCallback,
        const GrGpu::BackendTextureData* data) {
    GrGpu* gpu = dContext->priv().getGpu();

    GrBackendTexture beTex = gpu->createCompressedBackendTexture(dimensions, backendFormat,
                                                                 mipMapped, isProtected);
    if (!beTex.isValid()) {
        return {};
    }

    if (!dContext->priv().getGpu()->updateCompressedBackendTexture(
                beTex, std::move(finishedCallback), data)) {
        dContext->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
706
// Creates a compressed backend texture filled with a solid color.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const SkColor4f& color,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    // Wrap the proc immediately so it fires even on the abandoned early-out.
    sk_sp<GrRefCntedCallback> finishedCallback;
    if (finishedProc) {
        finishedCallback.reset(new GrRefCntedCallback(finishedProc, finishedContext));
    }

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}
729
730GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
731 SkImage::CompressionType compression,
732 const SkColor4f& color,
733 GrMipmapped mipMapped,
734 GrProtected isProtected,
735 GrGpuFinishedProc finishedProc,
736 GrGpuFinishedContext finishedContext) {
737 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
738 GrBackendFormat format = this->compressedBackendFormat(compression);
739 return this->createCompressedBackendTexture(width, height, format, color,
740 mipMapped, isProtected, finishedProc,
741 finishedContext);
742}
743
// Creates a compressed backend texture initialized from raw compressed bytes.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const void* compressedData,
                                                                 size_t dataSize,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    sk_sp<GrRefCntedCallback> finishedCallback;
    if (finishedProc) {
        finishedCallback.reset(new GrRefCntedCallback(finishedProc, finishedContext));
    }

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(compressedData, dataSize);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}
767
768GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
769 SkImage::CompressionType compression,
770 const void* data, size_t dataSize,
771 GrMipmapped mipMapped,
772 GrProtected isProtected,
773 GrGpuFinishedProc finishedProc,
774 GrGpuFinishedContext finishedContext) {
775 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
776 GrBackendFormat format = this->compressedBackendFormat(compression);
777 return this->createCompressedBackendTexture(width, height, format, data, dataSize, mipMapped,
778 isProtected, finishedProc, finishedContext);
779}
780
// Overwrites an existing compressed backend texture with a solid color.
bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
                                                     const SkColor4f& color,
                                                     GrGpuFinishedProc finishedProc,
                                                     GrGpuFinishedContext finishedContext) {
    sk_sp<GrRefCntedCallback> finishedCallback;
    if (finishedProc) {
        finishedCallback.reset(new GrRefCntedCallback(finishedProc, finishedContext));
    }

    if (this->abandoned()) {
        return false;
    }

    GrGpu::BackendTextureData data(color);
    return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
797
// Overwrites an existing compressed backend texture from raw compressed bytes.
bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
                                                     const void* compressedData,
                                                     size_t dataSize,
                                                     GrGpuFinishedProc finishedProc,
                                                     GrGpuFinishedContext finishedContext) {
    sk_sp<GrRefCntedCallback> finishedCallback;
    if (finishedProc) {
        finishedCallback.reset(new GrRefCntedCallback(finishedProc, finishedContext));
    }

    if (this->abandoned()) {
        return false;
    }

    if (!compressedData) {
        return false;
    }

    GrGpu::BackendTextureData data(compressedData, dataSize);

    return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
820
Adlai Holler6d0745b2020-10-13 13:29:00 -0400821//////////////////////////////////////////////////////////////////////////////
822
//////////////////////////////////////////////////////////////////////////////

// Transitions a backend texture's mutable state (e.g. image layout/queue on
// Vulkan), optionally returning the previous state to the caller.
bool GrDirectContext::setBackendTextureState(const GrBackendTexture& backendTexture,
                                             const GrBackendSurfaceMutableState& state,
                                             GrBackendSurfaceMutableState* previousState,
                                             GrGpuFinishedProc finishedProc,
                                             GrGpuFinishedContext finishedContext) {
    sk_sp<GrRefCntedCallback> callback;
    if (finishedProc) {
        callback.reset(new GrRefCntedCallback(finishedProc, finishedContext));
    }

    if (this->abandoned()) {
        return false;
    }

    return fGpu->setBackendTextureState(backendTexture, state, previousState, std::move(callback));
}
839
840
// Render-target counterpart of setBackendTextureState().
bool GrDirectContext::setBackendRenderTargetState(const GrBackendRenderTarget& backendRenderTarget,
                                                  const GrBackendSurfaceMutableState& state,
                                                  GrBackendSurfaceMutableState* previousState,
                                                  GrGpuFinishedProc finishedProc,
                                                  GrGpuFinishedContext finishedContext) {
    sk_sp<GrRefCntedCallback> callback;
    if (finishedProc) {
        callback.reset(new GrRefCntedCallback(finishedProc, finishedContext));
    }

    if (this->abandoned()) {
        return false;
    }

    return fGpu->setBackendRenderTargetState(backendRenderTarget, state, previousState,
                                             std::move(callback));
}
858
// Destroys a client-created backend texture.
void GrDirectContext::deleteBackendTexture(GrBackendTexture backendTex) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    // For the Vulkan backend we still must destroy the backend texture when the context is
    // abandoned.
    if ((this->abandoned() && this->backend() != GrBackendApi::kVulkan) || !backendTex.isValid()) {
        return;
    }

    fGpu->deleteBackendTexture(backendTex);
}
869
870//////////////////////////////////////////////////////////////////////////////
871
// Hands a cached shader (key/data pair, as previously emitted through the persistent
// cache) to the backend for ahead-of-time compilation; returns the backend's result.
// NOTE(review): unlike the other public entry points here there is no abandoned() guard,
// so this dereferences fGpu unconditionally — presumably callers must not invoke it on an
// abandoned context; confirm.
bool GrDirectContext::precompileShader(const SkData& key, const SkData& data) {
    return fGpu->precompileShader(key, data);
}
875
876#ifdef SK_ENABLE_DUMP_GPU
877#include "include/core/SkString.h"
878#include "src/utils/SkJSONWriter.h"
879SkString GrDirectContext::dump() const {
880 SkDynamicMemoryWStream stream;
881 SkJSONWriter writer(&stream, SkJSONWriter::Mode::kPretty);
882 writer.beginObject();
883
884 writer.appendString("backend", GrBackendApiToStr(this->backend()));
885
886 writer.appendName("caps");
887 this->caps()->dumpJSON(&writer);
888
889 writer.appendName("gpu");
890 this->fGpu->dumpJSON(&writer);
891
892 writer.appendName("context");
893 this->dumpJSON(&writer);
894
895 // Flush JSON to the memory stream
896 writer.endObject();
897 writer.flush();
898
899 // Null terminate the JSON data in the memory stream
900 stream.write8(0);
901
902 // Allocate a string big enough to hold all the data, then copy out of the stream
903 SkString result(stream.bytesWritten());
904 stream.copyToAndReset(result.writable_str());
905 return result;
906}
907#endif
908
John Rosascoa9b348f2019-11-08 13:18:15 -0800909#ifdef SK_GL
Robert Phillipsc7228c62020-07-14 12:57:39 -0400910
Robert Phillipsf4f80112020-07-13 16:13:31 -0400911/*************************************************************************************************/
912sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500913 GrContextOptions defaultOptions;
Jim Van Verth03b8ab22020-02-24 11:36:15 -0500914 return MakeGL(std::move(glInterface), defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -0500915}
916
// Creates a GL-backed context with the given options and a null interface — presumably
// the backend then constructs the platform's native GL interface; confirm in GrGLGpu::Make.
sk_sp<GrDirectContext> GrDirectContext::MakeGL(const GrContextOptions& options) {
    return MakeGL(nullptr, options);
}
920
Robert Phillipsf4f80112020-07-13 16:13:31 -0400921sk_sp<GrDirectContext> GrDirectContext::MakeGL() {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400922 GrContextOptions defaultOptions;
923 return MakeGL(nullptr, defaultOptions);
924}
925
Brian Salomon24069eb2020-06-24 10:19:52 -0400926#if GR_TEST_UTILS
// Returns a wrapper around |original| (a glGetError implementation) that, on roughly one
// in 300 otherwise-successful calls, reports GR_GL_OUT_OF_MEMORY instead of
// GR_GL_NO_ERROR. Test-only (GR_TEST_UTILS): used to exercise OOM-recovery paths.
GrGLFunction<GrGLGetErrorFn> make_get_error_with_random_oom(GrGLFunction<GrGLGetErrorFn> original) {
    // A SkRandom and a GrGLFunction<GrGLGetErrorFn> are too big to be captured by a
    // GrGLFunction<GrGLGetError> (surprise, surprise). So we make a context object and
    // capture that by pointer. However, GrGLFunction doesn't support calling a destructor
    // on the thing it captures. So we leak the context.
    struct GetErrorContext {
        SkRandom fRandom;                       // drives the 1-in-300 injection
        GrGLFunction<GrGLGetErrorFn> fGetError; // the real glGetError to delegate to
    };

    auto errorContext = new GetErrorContext;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
    // The leak above is intentional; tell LeakSanitizer so test runs stay clean.
    __lsan_ignore_object(errorContext);
#endif

    errorContext->fGetError = original;

    return GrGLFunction<GrGLGetErrorFn>([errorContext]() {
        GrGLenum error = errorContext->fGetError();
        // Only inject when the real call succeeded, so genuine errors are never masked.
        if (error == GR_GL_NO_ERROR && (errorContext->fRandom.nextU() % 300) == 0) {
            error = GR_GL_OUT_OF_MEMORY;
        }
        return error;
    });
}
953#endif
954
// Creates a GL-backed GrDirectContext from the given interface and options. Returns
// nullptr if the GrGLGpu cannot be created or context initialization fails.
sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface,
                                               const GrContextOptions& options) {
    sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kOpenGL, options));
#if GR_TEST_UTILS
    if (options.fRandomGLOOM) {
        // Replace glGetError on a copy of the interface with a version that randomly
        // injects GR_GL_OUT_OF_MEMORY (see make_get_error_with_random_oom above).
        // NOTE(review): *glInterface is dereferenced without a null check here, yet
        // nullptr is a valid argument on the other MakeGL paths — confirm callers never
        // combine fRandomGLOOM with a null interface.
        auto copy = sk_make_sp<GrGLInterface>(*glInterface);
        copy->fFunctions.fGetError =
                make_get_error_with_random_oom(glInterface->fFunctions.fGetError);
#if GR_GL_CHECK_ERROR
        // Suppress logging GL errors since we'll be synthetically generating them.
        copy->suppressErrorLogging();
#endif
        glInterface = std::move(copy);
    }
#endif
    direct->fGpu = GrGLGpu::Make(std::move(glInterface), options, direct.get());
    if (!direct->init()) {
        return nullptr;
    }
    return direct;
}
John Rosascoa9b348f2019-11-08 13:18:15 -0800976#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -0500977
Robert Phillipsf4f80112020-07-13 16:13:31 -0400978/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -0400979sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions) {
980 GrContextOptions defaultOptions;
981 return MakeMock(mockOptions, defaultOptions);
982}
983
984sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions,
985 const GrContextOptions& options) {
986 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMock, options));
987
988 direct->fGpu = GrMockGpu::Make(mockOptions, options, direct.get());
989 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500990 return nullptr;
991 }
Chris Daltona378b452019-12-11 13:24:11 -0500992
Robert Phillipsf4f80112020-07-13 16:13:31 -0400993 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -0500994}
995
Greg Danielb4d89562018-10-03 18:44:49 +0000996#ifdef SK_VULKAN
Robert Phillipsf4f80112020-07-13 16:13:31 -0400997/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -0400998sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext) {
999 GrContextOptions defaultOptions;
1000 return MakeVulkan(backendContext, defaultOptions);
1001}
1002
1003sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext,
1004 const GrContextOptions& options) {
1005 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kVulkan, options));
1006
1007 direct->fGpu = GrVkGpu::Make(backendContext, options, direct.get());
1008 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001009 return nullptr;
1010 }
1011
Robert Phillipsf4f80112020-07-13 16:13:31 -04001012 return direct;
Greg Danielb4d89562018-10-03 18:44:49 +00001013}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001014#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001015
1016#ifdef SK_METAL
Robert Phillipsf4f80112020-07-13 16:13:31 -04001017/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001018sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001019 GrContextOptions defaultOptions;
1020 return MakeMetal(device, queue, defaultOptions);
1021}
1022
Robert Phillipsf4f80112020-07-13 16:13:31 -04001023sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue,
1024 const GrContextOptions& options) {
1025 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
Robert Phillipsa3457b82018-03-08 11:30:12 -05001026
Robert Phillipsf4f80112020-07-13 16:13:31 -04001027 direct->fGpu = GrMtlTrampoline::MakeGpu(direct.get(), options, device, queue);
1028 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001029 return nullptr;
1030 }
Timothy Liang4e85e802018-06-28 16:37:18 -04001031
Robert Phillipsf4f80112020-07-13 16:13:31 -04001032 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001033}
1034#endif
1035
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001036#ifdef SK_DIRECT3D
Robert Phillipsf4f80112020-07-13 16:13:31 -04001037/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001038sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext) {
1039 GrContextOptions defaultOptions;
1040 return MakeDirect3D(backendContext, defaultOptions);
1041}
1042
1043sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext,
1044 const GrContextOptions& options) {
1045 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDirect3D, options));
1046
1047 direct->fGpu = GrD3DGpu::Make(backendContext, options, direct.get());
1048 if (!direct->init()) {
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -05001049 return nullptr;
1050 }
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001051
Robert Phillipsf4f80112020-07-13 16:13:31 -04001052 return direct;
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001053}
1054#endif
1055
Stephen White985741a2019-07-18 11:43:45 -04001056#ifdef SK_DAWN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001057/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001058sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device) {
Stephen White985741a2019-07-18 11:43:45 -04001059 GrContextOptions defaultOptions;
1060 return MakeDawn(device, defaultOptions);
1061}
1062
Robert Phillipsf4f80112020-07-13 16:13:31 -04001063sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device,
1064 const GrContextOptions& options) {
1065 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDawn, options));
Stephen White985741a2019-07-18 11:43:45 -04001066
Robert Phillipsf4f80112020-07-13 16:13:31 -04001067 direct->fGpu = GrDawnGpu::Make(device, options, direct.get());
1068 if (!direct->init()) {
Stephen White985741a2019-07-18 11:43:45 -04001069 return nullptr;
1070 }
1071
Robert Phillipsf4f80112020-07-13 16:13:31 -04001072 return direct;
Stephen White985741a2019-07-18 11:43:45 -04001073}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001074
Stephen White985741a2019-07-18 11:43:45 -04001075#endif