/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "include/gpu/GrDirectContext.h"

#include "include/core/SkTraceMemoryDump.h"
#include "include/gpu/GrContextThreadSafeProxy.h"
#include "src/core/SkTaskGroup.h"
#include "src/gpu/GrClientMappedBufferManager.h"
#include "src/gpu/GrContextThreadSafeProxyPriv.h"
#include "src/gpu/GrDirectContextPriv.h"
#include "src/gpu/GrDrawingManager.h"
#include "src/gpu/GrGpu.h"
#include "src/gpu/GrResourceProvider.h"
#include "src/gpu/GrShaderUtils.h"
#include "src/image/SkImage_GpuBase.h"

#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
#include "src/gpu/effects/GrSkSLFP.h"
#include "src/gpu/gl/GrGLGpu.h"
#include "src/gpu/mock/GrMockGpu.h"
#include "src/gpu/ops/GrSmallPathAtlasMgr.h"
#include "src/gpu/text/GrAtlasManager.h"
#include "src/gpu/text/GrStrikeCache.h"
#ifdef SK_METAL
#include "src/gpu/mtl/GrMtlTrampoline.h"
#endif
#ifdef SK_VULKAN
#include "src/gpu/vk/GrVkGpu.h"
#endif
#ifdef SK_DIRECT3D
#include "src/gpu/d3d/GrD3DGpu.h"
#endif
#ifdef SK_DAWN
#include "src/gpu/dawn/GrDawnGpu.h"
#endif
#include <memory>

#if GR_TEST_UTILS
#   include "include/utils/SkRandom.h"
#   if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
#       include <sanitizer/lsan_interface.h>
#   endif
#endif

#define ASSERT_SINGLE_OWNER GR_ASSERT_SINGLE_OWNER(this->singleOwner())

GrDirectContext::GrDirectContext(GrBackendApi backend, const GrContextOptions& options)
        : INHERITED(GrContextThreadSafeProxyPriv::Make(backend, options)) {
}

GrDirectContext::~GrDirectContext() {
    ASSERT_SINGLE_OWNER
    // this if-test protects against the case where the context is being destroyed
    // before having been fully created
    if (fGpu) {
        this->flushAndSubmit();
    }

    this->destroyDrawingManager();
    fMappedBufferManager.reset();

    // Ideally we could just let the ptr drop, but the resource cache queries this ptr in releaseAll.
    if (fResourceCache) {
        fResourceCache->releaseAll();
    }
}

sk_sp<GrContextThreadSafeProxy> GrDirectContext::threadSafeProxy() {
    return INHERITED::threadSafeProxy();
}

void GrDirectContext::resetGLTextureBindings() {
    if (this->abandoned() || this->backend() != GrBackendApi::kOpenGL) {
        return;
    }
    fGpu->resetTextureBindings();
}

void GrDirectContext::resetContext(uint32_t state) {
    ASSERT_SINGLE_OWNER
    fGpu->markContextDirty(state);
}

void GrDirectContext::abandonContext() {
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    fStrikeCache->freeAll();

    fMappedBufferManager->abandon();

    fResourceProvider->abandon();

    // abandon first so destructors don't try to free the resources in the API.
    fResourceCache->abandonAll();

    fGpu->disconnect(GrGpu::DisconnectType::kAbandon);

    fMappedBufferManager.reset();
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}

bool GrDirectContext::abandoned() {
    if (INHERITED::abandoned()) {
        return true;
    }

    if (fGpu && fGpu->isDeviceLost()) {
        this->abandonContext();
        return true;
    }
    return false;
}
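
// Note: abandoned() does more than report a flag here; it also polls the backend for device loss
// and, if the device is gone, abandons the context as a side effect. A minimal client-side sketch
// (illustrative only; recreateContextAndResources() is a hypothetical application hook):
//
//     if (dContext->abandoned()) {
//         // All GPU-backed resources tied to this context are now invalid.
//         recreateContextAndResources();
//     }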

bool GrDirectContext::oomed() { return fGpu ? fGpu->checkAndResetOOMed() : false; }

void GrDirectContext::releaseResourcesAndAbandonContext() {
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    fMappedBufferManager.reset();

    fResourceProvider->abandon();

    // Release all resources in the backend 3D API.
    fResourceCache->releaseAll();

    fGpu->disconnect(GrGpu::DisconnectType::kCleanup);
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}

void GrDirectContext::freeGpuResources() {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->flushAndSubmit();
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();

    // TODO: the glyph cache doesn't hold any GpuResources so this call should not be needed here.
    // Some slack in the GrTextBlob's implementation requires it though. That could be fixed.
    fStrikeCache->freeAll();

    this->drawingManager()->freeGpuResources();

    fResourceCache->purgeAllUnlocked();
}

bool GrDirectContext::init() {
    ASSERT_SINGLE_OWNER
    if (!fGpu) {
        return false;
    }

    fThreadSafeProxy->priv().init(fGpu->refCaps());
    if (!INHERITED::init()) {
        return false;
    }

    SkASSERT(this->getTextBlobCache());
    SkASSERT(this->threadSafeCache());

    fStrikeCache = std::make_unique<GrStrikeCache>();
    fResourceCache = std::make_unique<GrResourceCache>(this->caps(), this->singleOwner(),
                                                       this->contextID());
    fResourceCache->setProxyProvider(this->proxyProvider());
    fResourceCache->setThreadSafeCache(this->threadSafeCache());
    fResourceProvider = std::make_unique<GrResourceProvider>(fGpu.get(), fResourceCache.get(),
                                                             this->singleOwner());
    fMappedBufferManager = std::make_unique<GrClientMappedBufferManager>(this->contextID());

    fDidTestPMConversions = false;

    // DDL TODO: we need to think through how the task group & persistent cache
    // get passed on to/shared between all the DDLRecorders created with this context.
    if (this->options().fExecutor) {
        fTaskGroup = std::make_unique<SkTaskGroup>(*this->options().fExecutor);
    }

    fPersistentCache = this->options().fPersistentCache;
    fShaderErrorHandler = this->options().fShaderErrorHandler;
    if (!fShaderErrorHandler) {
        fShaderErrorHandler = GrShaderUtils::DefaultShaderErrorHandler();
    }

    GrDrawOpAtlas::AllowMultitexturing allowMultitexturing;
    if (GrContextOptions::Enable::kNo == this->options().fAllowMultipleGlyphCacheTextures ||
        // multitexturing supported only if range can represent the index + texcoords fully
        !(this->caps()->shaderCaps()->floatIs32Bits() ||
          this->caps()->shaderCaps()->integerSupport())) {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kNo;
    } else {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kYes;
    }

    GrProxyProvider* proxyProvider = this->priv().proxyProvider();

    fAtlasManager = std::make_unique<GrAtlasManager>(proxyProvider,
                                                     this->options().fGlyphCacheTextureMaximumBytes,
                                                     allowMultitexturing);
    this->priv().addOnFlushCallbackObject(fAtlasManager.get());

    return true;
}

void GrDirectContext::getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const {
    ASSERT_SINGLE_OWNER

    if (resourceCount) {
        *resourceCount = fResourceCache->getBudgetedResourceCount();
    }
    if (resourceBytes) {
        *resourceBytes = fResourceCache->getBudgetedResourceBytes();
    }
}

size_t GrDirectContext::getResourceCachePurgeableBytes() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getPurgeableBytes();
}

void GrDirectContext::getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const {
    ASSERT_SINGLE_OWNER
    if (maxResources) {
        *maxResources = -1;
    }
    if (maxResourceBytes) {
        *maxResourceBytes = this->getResourceCacheLimit();
    }
}

size_t GrDirectContext::getResourceCacheLimit() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getMaxResourceBytes();
}

void GrDirectContext::setResourceCacheLimits(int unused, size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    this->setResourceCacheLimit(maxResourceBytes);
}

void GrDirectContext::setResourceCacheLimit(size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    fResourceCache->setLimit(maxResourceBytes);
}
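
// Usage sketch (illustrative only; the byte budget below is an arbitrary example value):
//
//     dContext->setResourceCacheLimit(96 * 1024 * 1024);   // cap the GPU resource cache at 96 MB
//
//     int resourceCount;
//     size_t resourceBytes;
//     dContext->getResourceCacheUsage(&resourceCount, &resourceBytes);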

void GrDirectContext::purgeUnlockedResources(bool scratchResourcesOnly) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(scratchResourcesOnly);
    fResourceCache->purgeAsNeeded();

    // The text blob cache doesn't actually hold any GPU resources, but this is a convenient
    // place to purge stale blobs.
    this->getTextBlobCache()->purgeStaleBlobs();
}

void GrDirectContext::performDeferredCleanup(std::chrono::milliseconds msNotUsed) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->checkAsyncWorkCompletion();
    fMappedBufferManager->process();
    auto purgeTime = GrStdSteadyClock::now() - msNotUsed;

    fResourceCache->purgeAsNeeded();
    fResourceCache->purgeResourcesNotUsedSince(purgeTime);

    if (auto ccpr = this->drawingManager()->getCoverageCountingPathRenderer()) {
        ccpr->purgeCacheEntriesOlderThan(this->proxyProvider(), purgeTime);
    }

    // The text blob cache doesn't actually hold any GPU resources, but this is a convenient
    // place to purge stale blobs.
    this->getTextBlobCache()->purgeStaleBlobs();
}
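
// Usage sketch (illustrative only; the 5-second threshold is an arbitrary example): calling this
// periodically, e.g. on a timer or once per frame, lets the context drop resources that have sat
// unused for at least the given duration.
//
//     dContext->performDeferredCleanup(std::chrono::milliseconds(5000));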

void GrDirectContext::purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(bytesToPurge, preferScratchResources);
}

////////////////////////////////////////////////////////////////////////////////
bool GrDirectContext::wait(int numSemaphores, const GrBackendSemaphore waitSemaphores[],
                           bool deleteSemaphoresAfterWait) {
    if (!fGpu || !fGpu->caps()->semaphoreSupport()) {
        return false;
    }
    GrWrapOwnership ownership =
            deleteSemaphoresAfterWait ? kAdopt_GrWrapOwnership : kBorrow_GrWrapOwnership;
    for (int i = 0; i < numSemaphores; ++i) {
        std::unique_ptr<GrSemaphore> sema = fResourceProvider->wrapBackendSemaphore(
                waitSemaphores[i], GrResourceProvider::SemaphoreWrapType::kWillWait, ownership);
        // If we failed to wrap the semaphore it means the client didn't give us a valid semaphore
        // to begin with. Therefore, it is fine to not wait on it.
        if (sema) {
            fGpu->waitSemaphore(sema.get());
        }
    }
    return true;
}

GrSmallPathAtlasMgr* GrDirectContext::onGetSmallPathAtlasMgr() {
    if (!fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr = std::make_unique<GrSmallPathAtlasMgr>();

        this->priv().addOnFlushCallbackObject(fSmallPathAtlasMgr.get());
    }

    if (!fSmallPathAtlasMgr->initAtlas(this->proxyProvider(), this->caps())) {
        return nullptr;
    }

    return fSmallPathAtlasMgr.get();
}

////////////////////////////////////////////////////////////////////////////////

GrSemaphoresSubmitted GrDirectContext::flush(const GrFlushInfo& info) {
    ASSERT_SINGLE_OWNER
    if (this->abandoned()) {
        if (info.fFinishedProc) {
            info.fFinishedProc(info.fFinishedContext);
        }
        if (info.fSubmittedProc) {
            info.fSubmittedProc(info.fSubmittedContext, false);
        }
        return GrSemaphoresSubmitted::kNo;
    }

    bool flushed = this->drawingManager()->flush(
            {}, SkSurface::BackendSurfaceAccess::kNoAccess, info, nullptr);

    if (!flushed || (!this->priv().caps()->semaphoreSupport() && info.fNumSemaphores)) {
        return GrSemaphoresSubmitted::kNo;
    }
    return GrSemaphoresSubmitted::kYes;
}

bool GrDirectContext::submit(bool syncCpu) {
    ASSERT_SINGLE_OWNER
    if (this->abandoned()) {
        return false;
    }

    if (!fGpu) {
        return false;
    }

    return fGpu->submitToGpu(syncCpu);
}
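
// Usage sketch (illustrative only): flush() records the pending work with the backend and submit()
// hands it to the GPU; flushAndSubmit() combines the two. onGpuWorkDone and myState are
// hypothetical client callbacks/state, not Skia APIs.
//
//     GrFlushInfo info;
//     info.fFinishedProc = onGpuWorkDone;   // invoked once the GPU has finished the flushed work
//     info.fFinishedContext = myState;
//     dContext->flush(info);
//     dContext->submit(/*syncCpu=*/false);  // pass true to block until the GPU work completes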

////////////////////////////////////////////////////////////////////////////////

void GrDirectContext::checkAsyncWorkCompletion() {
    if (fGpu) {
        fGpu->checkFinishProcs();
    }
}

////////////////////////////////////////////////////////////////////////////////

void GrDirectContext::storeVkPipelineCacheData() {
    if (fGpu) {
        fGpu->storeVkPipelineCacheData();
    }
}

////////////////////////////////////////////////////////////////////////////////

bool GrDirectContext::supportsDistanceFieldText() const {
    return this->caps()->shaderCaps()->supportsDistanceFieldText();
}

//////////////////////////////////////////////////////////////////////////////

void GrDirectContext::dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const {
    ASSERT_SINGLE_OWNER
    fResourceCache->dumpMemoryStatistics(traceMemoryDump);
    traceMemoryDump->dumpNumericValue("skia/gr_text_blob_cache", "size", "bytes",
                                      this->getTextBlobCache()->usedBytes());
}

size_t GrDirectContext::ComputeImageSize(sk_sp<SkImage> image, GrMipmapped mipMapped,
                                         bool useNextPow2) {
    if (!image->isTextureBacked()) {
        return 0;
    }
    SkImage_GpuBase* gpuImage = static_cast<SkImage_GpuBase*>(as_IB(image.get()));
    GrTextureProxy* proxy = gpuImage->peekProxy();
    if (!proxy) {
        return 0;
    }

    int colorSamplesPerPixel = 1;
    return GrSurface::ComputeSize(proxy->backendFormat(), image->dimensions(),
                                  colorSamplesPerPixel, mipMapped, useNextPow2);
}

GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    return fGpu->createBackendTexture({width, height}, backendFormat, renderable,
                                      mipMapped, isProtected);
}

GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    const GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);

    return this->createBackendTexture(width, height, format, mipMapped, renderable, isProtected);
}

static GrBackendTexture create_and_update_backend_texture(
        GrDirectContext* dContext,
        SkISize dimensions,
        const GrBackendFormat& backendFormat,
        GrMipmapped mipMapped,
        GrRenderable renderable,
        GrProtected isProtected,
        sk_sp<GrRefCntedCallback> finishedCallback,
        const GrGpu::BackendTextureData* data) {
    GrGpu* gpu = dContext->priv().getGpu();

    GrBackendTexture beTex = gpu->createBackendTexture(dimensions, backendFormat, renderable,
                                                       mipMapped, isProtected);
    if (!beTex.isValid()) {
        return {};
    }

    if (!dContext->priv().getGpu()->updateBackendTexture(beTex,
                                                         std::move(finishedCallback),
                                                         data)) {
        dContext->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}

GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_backend_texture(this, {width, height},
                                             backendFormat, mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}

GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
    if (!format.isValid()) {
        return {};
    }

    GrColorType grColorType = SkColorTypeToGrColorType(skColorType);
    SkColor4f swizzledColor = this->caps()->getWriteSwizzle(format, grColorType).applyTo(color);

    GrGpu::BackendTextureData data(swizzledColor);
    return create_and_update_backend_texture(this, {width, height}, format,
                                             mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
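
// Usage sketch (illustrative only): create a small backend texture cleared to a solid color and
// release it when done. The client owns the returned texture and must delete it explicitly.
//
//     GrBackendTexture tex = dContext->createBackendTexture(256, 256, kRGBA_8888_SkColorType,
//                                                           SkColors::kRed, GrMipmapped::kNo,
//                                                           GrRenderable::kNo, GrProtected::kNo,
//                                                           /*finishedProc=*/nullptr,
//                                                           /*finishedContext=*/nullptr);
//     if (tex.isValid()) {
//         // ... wrap it in an SkImage/SkSurface and use it ...
//         dContext->deleteBackendTexture(tex);
//     }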

GrBackendTexture GrDirectContext::createBackendTexture(const SkPixmap srcData[],
                                                       int numProvidedLevels,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    if (!srcData || numProvidedLevels <= 0) {
        return {};
    }

    int baseWidth = srcData[0].width();
    int baseHeight = srcData[0].height();
    SkColorType colorType = srcData[0].colorType();

    GrMipmapped mipMapped = GrMipmapped::kNo;
    int numExpectedLevels = 1;
    if (numProvidedLevels > 1) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(baseWidth, baseHeight) + 1;
        mipMapped = GrMipmapped::kYes;
    }

    if (numProvidedLevels != numExpectedLevels) {
        return {};
    }

    GrBackendFormat backendFormat = this->defaultBackendFormat(colorType, renderable);

    GrGpu::BackendTextureData data(srcData);
    return create_and_update_backend_texture(this, {baseWidth, baseHeight},
                                             backendFormat, mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
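
// Worked example of the level-count check above: for a 256x256 base level,
// SkMipmap::ComputeLevelCount(256, 256) returns 8 (the levels below the base), so a fully
// mipmapped upload must provide 8 + 1 = 9 pixmaps: 256x256, 128x128, ..., 2x2, 1x1.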

bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrGpu::BackendTextureData data(color);
    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}

bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           SkColorType skColorType,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrBackendFormat format = backendTexture.getBackendFormat();
    GrColorType grColorType = SkColorTypeAndFormatToGrColorType(this->caps(), skColorType, format);

    if (!this->caps()->areColorTypeAndFormatCompatible(grColorType, format)) {
        return false;
    }

    GrSwizzle swizzle = this->caps()->getWriteSwizzle(format, grColorType);
    GrGpu::BackendTextureData data(swizzle.applyTo(color));

    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}

bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkPixmap srcData[],
                                           int numLevels,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    if (!srcData || numLevels <= 0) {
        return false;
    }

    int numExpectedLevels = 1;
    if (backendTexture.hasMipmaps()) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(backendTexture.width(),
                                                        backendTexture.height()) + 1;
    }
    if (numLevels != numExpectedLevels) {
        return false;
    }

    GrGpu::BackendTextureData data(srcData);
    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}

//////////////////////////////////////////////////////////////////////////////

static GrBackendTexture create_and_update_compressed_backend_texture(
        GrDirectContext* dContext,
        SkISize dimensions,
        const GrBackendFormat& backendFormat,
        GrMipmapped mipMapped,
        GrProtected isProtected,
        sk_sp<GrRefCntedCallback> finishedCallback,
        const GrGpu::BackendTextureData* data) {
    GrGpu* gpu = dContext->priv().getGpu();

    GrBackendTexture beTex = gpu->createCompressedBackendTexture(dimensions, backendFormat,
                                                                 mipMapped, isProtected);
    if (!beTex.isValid()) {
        return {};
    }

    if (!dContext->priv().getGpu()->updateCompressedBackendTexture(
                beTex, std::move(finishedCallback), data)) {
        dContext->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}

GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const SkColor4f& color,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                         backendFormat, mipMapped, isProtected,
                                                         std::move(finishedCallback), &data);
}

GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 SkImage::CompressionType compression,
                                                                 const SkColor4f& color,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    GrBackendFormat format = this->compressedBackendFormat(compression);
    return this->createCompressedBackendTexture(width, height, format, color,
                                                mipMapped, isProtected, finishedProc,
                                                finishedContext);
}

GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const void* compressedData,
                                                                 size_t dataSize,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(compressedData, dataSize);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                         backendFormat, mipMapped, isProtected,
                                                         std::move(finishedCallback), &data);
}

GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 SkImage::CompressionType compression,
                                                                 const void* data, size_t dataSize,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    GrBackendFormat format = this->compressedBackendFormat(compression);
    return this->createCompressedBackendTexture(width, height, format, data, dataSize, mipMapped,
                                                isProtected, finishedProc, finishedContext);
}

bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
                                                     const SkColor4f& color,
                                                     GrGpuFinishedProc finishedProc,
                                                     GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrGpu::BackendTextureData data(color);
    return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
}

bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
                                                     const void* compressedData,
                                                     size_t dataSize,
                                                     GrGpuFinishedProc finishedProc,
                                                     GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    if (!compressedData) {
        return false;
    }

    GrGpu::BackendTextureData data(compressedData, dataSize);

    return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
}

//////////////////////////////////////////////////////////////////////////////

bool GrDirectContext::setBackendTextureState(const GrBackendTexture& backendTexture,
                                             const GrBackendSurfaceMutableState& state,
                                             GrBackendSurfaceMutableState* previousState,
                                             GrGpuFinishedProc finishedProc,
                                             GrGpuFinishedContext finishedContext) {
    auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    return fGpu->setBackendTextureState(backendTexture, state, previousState, std::move(callback));
}

bool GrDirectContext::setBackendRenderTargetState(const GrBackendRenderTarget& backendRenderTarget,
                                                  const GrBackendSurfaceMutableState& state,
                                                  GrBackendSurfaceMutableState* previousState,
                                                  GrGpuFinishedProc finishedProc,
                                                  GrGpuFinishedContext finishedContext) {
    auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    return fGpu->setBackendRenderTargetState(backendRenderTarget, state, previousState,
                                             std::move(callback));
}

void GrDirectContext::deleteBackendTexture(GrBackendTexture backendTex) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    // For the Vulkan backend we still must destroy the backend texture when the context is
    // abandoned.
    if ((this->abandoned() && this->backend() != GrBackendApi::kVulkan) || !backendTex.isValid()) {
        return;
    }

    fGpu->deleteBackendTexture(backendTex);
}

//////////////////////////////////////////////////////////////////////////////

bool GrDirectContext::precompileShader(const SkData& key, const SkData& data) {
    return fGpu->precompileShader(key, data);
}

#ifdef SK_ENABLE_DUMP_GPU
#include "include/core/SkString.h"
#include "src/utils/SkJSONWriter.h"
SkString GrDirectContext::dump() const {
    SkDynamicMemoryWStream stream;
    SkJSONWriter writer(&stream, SkJSONWriter::Mode::kPretty);
    writer.beginObject();

    writer.appendString("backend", GrBackendApiToStr(this->backend()));

    writer.appendName("caps");
    this->caps()->dumpJSON(&writer);

    writer.appendName("gpu");
    this->fGpu->dumpJSON(&writer);

    writer.appendName("context");
    this->dumpJSON(&writer);

    // Flush JSON to the memory stream
    writer.endObject();
    writer.flush();

    // Null terminate the JSON data in the memory stream
    stream.write8(0);

    // Allocate a string big enough to hold all the data, then copy out of the stream
    SkString result(stream.bytesWritten());
    stream.copyToAndReset(result.writable_str());
    return result;
}
#endif

#ifdef SK_GL

/*************************************************************************************************/
sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface) {
    GrContextOptions defaultOptions;
    return MakeGL(std::move(glInterface), defaultOptions);
}

sk_sp<GrDirectContext> GrDirectContext::MakeGL(const GrContextOptions& options) {
    return MakeGL(nullptr, options);
}

sk_sp<GrDirectContext> GrDirectContext::MakeGL() {
    GrContextOptions defaultOptions;
    return MakeGL(nullptr, defaultOptions);
}

#if GR_TEST_UTILS
GrGLFunction<GrGLGetErrorFn> make_get_error_with_random_oom(GrGLFunction<GrGLGetErrorFn> original) {
    // A SkRandom and a GrGLFunction<GrGLGetErrorFn> are too big to be captured by a
    // GrGLFunction<GrGLGetError> (surprise, surprise). So we make a context object and
    // capture that by pointer. However, GrGLFunction doesn't support calling a destructor
    // on the thing it captures. So we leak the context.
    struct GetErrorContext {
        SkRandom fRandom;
        GrGLFunction<GrGLGetErrorFn> fGetError;
    };

    auto errorContext = new GetErrorContext;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
    __lsan_ignore_object(errorContext);
#endif

    errorContext->fGetError = original;

    return GrGLFunction<GrGLGetErrorFn>([errorContext]() {
        GrGLenum error = errorContext->fGetError();
        if (error == GR_GL_NO_ERROR && (errorContext->fRandom.nextU() % 300) == 0) {
            error = GR_GL_OUT_OF_MEMORY;
        }
        return error;
    });
}
#endif

sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface,
                                               const GrContextOptions& options) {
    sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kOpenGL, options));
#if GR_TEST_UTILS
    if (options.fRandomGLOOM) {
        auto copy = sk_make_sp<GrGLInterface>(*glInterface);
        copy->fFunctions.fGetError =
                make_get_error_with_random_oom(glInterface->fFunctions.fGetError);
#if GR_GL_CHECK_ERROR
        // Suppress logging GL errors since we'll be synthetically generating them.
        copy->suppressErrorLogging();
#endif
        glInterface = std::move(copy);
    }
#endif
    direct->fGpu = GrGLGpu::Make(std::move(glInterface), options, direct.get());
    if (!direct->init()) {
        return nullptr;
    }
    return direct;
}
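
// Usage sketch (illustrative only): with a GL context already current on this thread, the native
// interface can be queried and handed to MakeGL; passing nullptr asks Skia to build the native
// interface itself.
//
//     sk_sp<const GrGLInterface> iface = GrGLMakeNativeInterface();
//     sk_sp<GrDirectContext> dContext = GrDirectContext::MakeGL(std::move(iface));
//     if (!dContext) {
//         // interface creation or context initialization failed
//     }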
#endif

/*************************************************************************************************/
sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions) {
    GrContextOptions defaultOptions;
    return MakeMock(mockOptions, defaultOptions);
}

sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions,
                                                 const GrContextOptions& options) {
    sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMock, options));

    direct->fGpu = GrMockGpu::Make(mockOptions, options, direct.get());
    if (!direct->init()) {
        return nullptr;
    }

    return direct;
}

#ifdef SK_VULKAN
/*************************************************************************************************/
sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext) {
    GrContextOptions defaultOptions;
    return MakeVulkan(backendContext, defaultOptions);
}

sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext,
                                                   const GrContextOptions& options) {
    sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kVulkan, options));

    direct->fGpu = GrVkGpu::Make(backendContext, options, direct.get());
    if (!direct->init()) {
        return nullptr;
    }

    return direct;
}
#endif

#ifdef SK_METAL
/*************************************************************************************************/
sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue) {
    GrContextOptions defaultOptions;
    return MakeMetal(device, queue, defaultOptions);
}

sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue,
                                                  const GrContextOptions& options) {
    sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));

    direct->fGpu = GrMtlTrampoline::MakeGpu(direct.get(), options, device, queue);
    if (!direct->init()) {
        return nullptr;
    }

    return direct;
}
#endif

#ifdef SK_DIRECT3D
/*************************************************************************************************/
sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext) {
    GrContextOptions defaultOptions;
    return MakeDirect3D(backendContext, defaultOptions);
}

sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext,
                                                     const GrContextOptions& options) {
    sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDirect3D, options));

    direct->fGpu = GrD3DGpu::Make(backendContext, options, direct.get());
    if (!direct->init()) {
        return nullptr;
    }

    return direct;
}
#endif

#ifdef SK_DAWN
/*************************************************************************************************/
sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device) {
    GrContextOptions defaultOptions;
    return MakeDawn(device, defaultOptions);
}

sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device,
                                                 const GrContextOptions& options) {
    sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDawn, options));

    direct->fGpu = GrDawnGpu::Make(device, options, direct.get());
    if (!direct->init()) {
        return nullptr;
    }

    return direct;
}

#endif