blob: b6b6558c86279fe616c5a351b0f55bcdd9d6babc [file] [log] [blame]
Robert Phillipsa3457b82018-03-08 11:30:12 -05001/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Greg Daniel54bfb182018-11-20 17:12:36 -05008
Robert Phillipsb7bfbc22020-07-01 12:55:01 -04009#include "include/gpu/GrDirectContext.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050010
Adlai Holler3acc69a2020-10-13 08:20:51 -040011#include "include/core/SkTraceMemoryDump.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050012#include "include/gpu/GrContextThreadSafeProxy.h"
Adlai Holler9555f292020-10-09 09:41:14 -040013#include "src/core/SkTaskGroup.h"
14#include "src/gpu/GrClientMappedBufferManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050015#include "src/gpu/GrContextThreadSafeProxyPriv.h"
Adlai Hollera0693042020-10-14 11:23:11 -040016#include "src/gpu/GrDirectContextPriv.h"
Adlai Holler4aa4c602020-10-12 13:58:52 -040017#include "src/gpu/GrDrawingManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/gpu/GrGpu.h"
Adlai Holler9555f292020-10-09 09:41:14 -040019#include "src/gpu/GrResourceProvider.h"
20#include "src/gpu/GrShaderUtils.h"
Adlai Holler3acc69a2020-10-13 08:20:51 -040021#include "src/image/SkImage_GpuBase.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050022
Adlai Holler4aa4c602020-10-12 13:58:52 -040023#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050024#include "src/gpu/effects/GrSkSLFP.h"
25#include "src/gpu/gl/GrGLGpu.h"
26#include "src/gpu/mock/GrMockGpu.h"
Robert Phillips5edf5102020-08-10 16:30:36 -040027#include "src/gpu/ops/GrSmallPathAtlasMgr.h"
Robert Phillipse19babf2020-04-06 13:57:30 -040028#include "src/gpu/text/GrAtlasManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050029#include "src/gpu/text/GrStrikeCache.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050030#ifdef SK_METAL
Mike Kleinc0bd9f92019-04-23 12:05:21 -050031#include "src/gpu/mtl/GrMtlTrampoline.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050032#endif
33#ifdef SK_VULKAN
Mike Kleinc0bd9f92019-04-23 12:05:21 -050034#include "src/gpu/vk/GrVkGpu.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050035#endif
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -050036#ifdef SK_DIRECT3D
37#include "src/gpu/d3d/GrD3DGpu.h"
38#endif
Stephen White985741a2019-07-18 11:43:45 -040039#ifdef SK_DAWN
Mike Klein52337de2019-07-25 09:00:52 -050040#include "src/gpu/dawn/GrDawnGpu.h"
Stephen White985741a2019-07-18 11:43:45 -040041#endif
Adlai Holler6d0745b2020-10-13 13:29:00 -040042#include <memory>
Robert Phillipsa3457b82018-03-08 11:30:12 -050043
Brian Salomon24069eb2020-06-24 10:19:52 -040044#if GR_TEST_UTILS
45# include "include/utils/SkRandom.h"
46# if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
47# include <sanitizer/lsan_interface.h>
48# endif
49#endif
50
// Default for GrContextOptions::fReduceOpsTaskSplitting when the client leaves
// it at kDefault. Note both branches are currently 'false': the feature is off
// by default, and SK_DISABLE_REDUCE_OPLIST_SPLITTING exists so clients can pin
// it off once the default flips.
// NOTE(review): presumably the #else branch is intended to become 'true'
// eventually — confirm before relying on the distinction.
#ifdef SK_DISABLE_REDUCE_OPLIST_SPLITTING
static const bool kDefaultReduceOpsTaskSplitting = false;
#else
static const bool kDefaultReduceOpsTaskSplitting = false;
#endif

// Asserts that the caller is on the thread that owns this context.
#define ASSERT_SINGLE_OWNER GR_ASSERT_SINGLE_OWNER(this->singleOwner())
58
// Constructs a direct context for the given backend; all heavy initialization
// (caps, caches, drawing manager) is deferred to init().
GrDirectContext::GrDirectContext(GrBackendApi backend, const GrContextOptions& options)
        : INHERITED(GrContextThreadSafeProxyPriv::Make(backend, options)) {
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050062
// Destructor: flushes pending work, then tears down in dependency order —
// drawing manager first, then mapped buffers, then the resource cache.
GrDirectContext::~GrDirectContext() {
    ASSERT_SINGLE_OWNER
    // this if-test protects against the case where the context is being destroyed
    // before having been fully created
    if (fGpu) {
        this->flushAndSubmit();
    }

    this->destroyDrawingManager();
    fMappedBufferManager.reset();

    // Ideally we could just let the ptr drop, but resource cache queries this ptr in releaseAll.
    if (fResourceCache) {
        fResourceCache->releaseAll();
    }
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050079
Adlai Holler61a591c2020-10-12 12:38:33 -040080sk_sp<GrContextThreadSafeProxy> GrDirectContext::threadSafeProxy() {
81 return INHERITED::threadSafeProxy();
82}
83
Adlai Hollera7a40442020-10-09 09:49:42 -040084void GrDirectContext::resetGLTextureBindings() {
85 if (this->abandoned() || this->backend() != GrBackendApi::kOpenGL) {
86 return;
87 }
88 fGpu->resetTextureBindings();
89}
90
// Marks portions of the backend 3D API state dirty so cached GPU state is
// re-sent on the next use; |state| is a bitfield of what was touched.
void GrDirectContext::resetContext(uint32_t state) {
    ASSERT_SINGLE_OWNER
    fGpu->markContextDirty(state);
}
95
// Severs this context's connection to the backend 3D API without freeing
// resources back to it. After this call the context is unusable. The teardown
// order is deliberate: caches are abandoned before the GrGpu is disconnected.
void GrDirectContext::abandonContext() {
    // Idempotent: a second call is a no-op.
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    fStrikeCache->freeAll();

    fMappedBufferManager->abandon();

    fResourceProvider->abandon();

    // abandon first so destructors don't try to free the resources in the API.
    fResourceCache->abandonAll();

    fGpu->disconnect(GrGpu::DisconnectType::kAbandon);

    fMappedBufferManager.reset();
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500120
Adlai Hollera7a40442020-10-09 09:49:42 -0400121bool GrDirectContext::abandoned() {
122 if (INHERITED::abandoned()) {
123 return true;
124 }
125
126 if (fGpu && fGpu->isDeviceLost()) {
127 this->abandonContext();
128 return true;
129 }
130 return false;
131}
132
Adlai Holler61a591c2020-10-12 12:38:33 -0400133bool GrDirectContext::oomed() { return fGpu ? fGpu->checkAndResetOOMed() : false; }
134
// Like abandonContext(), but first releases all resources back to the 3D API
// (DisconnectType::kCleanup) rather than just dropping them.
void GrDirectContext::releaseResourcesAndAbandonContext() {
    // Idempotent: a second call is a no-op.
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    fMappedBufferManager.reset();

    fResourceProvider->abandon();

    // Release all resources in the backend 3D API.
    fResourceCache->releaseAll();

    fGpu->disconnect(GrGpu::DisconnectType::kCleanup);
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillips6db27c22019-05-01 10:43:56 -0400155
// Frees GPU resources not required by pending work: flushes first so in-flight
// work retains what it needs, then purges atlases, caches, and the drawing
// manager's resources.
void GrDirectContext::freeGpuResources() {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->flushAndSubmit();
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();

    // TODO: the glyph cache doesn't hold any GpuResources so this call should not be needed here.
    // Some slack in the GrTextBlob's implementation requires it though. That could be fixed.
    fStrikeCache->freeAll();

    this->drawingManager()->freeGpuResources();

    fResourceCache->purgeAllUnlocked();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500177
// Second-phase construction. Publishes caps to the thread-safe proxy, then
// builds the strike cache, resource cache/provider, mapped-buffer manager,
// optional task group, shader-error handler, drawing manager, and glyph atlas
// manager. Returns false if backend GrGpu creation failed or base init fails.
bool GrDirectContext::init() {
    ASSERT_SINGLE_OWNER
    // No GrGpu means backend context creation failed earlier.
    if (!fGpu) {
        return false;
    }

    fThreadSafeProxy->priv().init(fGpu->refCaps());
    if (!INHERITED::init()) {
        return false;
    }

    SkASSERT(this->getTextBlobCache());
    SkASSERT(this->threadSafeCache());

    fStrikeCache = std::make_unique<GrStrikeCache>();
    fResourceCache = std::make_unique<GrResourceCache>(this->caps(), this->singleOwner(),
                                                       this->contextID());
    fResourceCache->setProxyProvider(this->proxyProvider());
    fResourceCache->setThreadSafeCache(this->threadSafeCache());
    fResourceProvider = std::make_unique<GrResourceProvider>(fGpu.get(), fResourceCache.get(),
                                                             this->singleOwner());
    fMappedBufferManager = std::make_unique<GrClientMappedBufferManager>(this->contextID());

    fDidTestPMConversions = false;

    // DDL TODO: we need to think through how the task group & persistent cache
    // get passed on to/shared between all the DDLRecorders created with this context.
    if (this->options().fExecutor) {
        fTaskGroup = std::make_unique<SkTaskGroup>(*this->options().fExecutor);
    }

    fPersistentCache = this->options().fPersistentCache;
    fShaderErrorHandler = this->options().fShaderErrorHandler;
    if (!fShaderErrorHandler) {
        fShaderErrorHandler = GrShaderUtils::DefaultShaderErrorHandler();
    }

    // Client option overrides the compiled-in default in either direction.
    bool reduceOpsTaskSplitting = kDefaultReduceOpsTaskSplitting;
    if (GrContextOptions::Enable::kNo == this->options().fReduceOpsTaskSplitting) {
        reduceOpsTaskSplitting = false;
    } else if (GrContextOptions::Enable::kYes == this->options().fReduceOpsTaskSplitting) {
        reduceOpsTaskSplitting = true;
    }

    // NOTE(review): first argument's meaning isn't visible here — confirm
    // against setupDrawingManager's declaration before changing it.
    this->setupDrawingManager(true, reduceOpsTaskSplitting);

    GrDrawOpAtlas::AllowMultitexturing allowMultitexturing;
    if (GrContextOptions::Enable::kNo == this->options().fAllowMultipleGlyphCacheTextures ||
        // multitexturing supported only if range can represent the index + texcoords fully
        !(this->caps()->shaderCaps()->floatIs32Bits() ||
          this->caps()->shaderCaps()->integerSupport())) {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kNo;
    } else {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kYes;
    }

    GrProxyProvider* proxyProvider = this->priv().proxyProvider();

    fAtlasManager = std::make_unique<GrAtlasManager>(proxyProvider,
                                                     this->options().fGlyphCacheTextureMaximumBytes,
                                                     allowMultitexturing);
    // Atlas manager participates in pre-flush uploads.
    this->priv().addOnFlushCallbackObject(fAtlasManager.get());

    return true;
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500243
Adlai Holler3a508e92020-10-12 13:58:01 -0400244void GrDirectContext::getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const {
245 ASSERT_SINGLE_OWNER
246
247 if (resourceCount) {
248 *resourceCount = fResourceCache->getBudgetedResourceCount();
249 }
250 if (resourceBytes) {
251 *resourceBytes = fResourceCache->getBudgetedResourceBytes();
252 }
253}
254
255size_t GrDirectContext::getResourceCachePurgeableBytes() const {
256 ASSERT_SINGLE_OWNER
257 return fResourceCache->getPurgeableBytes();
258}
259
260void GrDirectContext::getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const {
261 ASSERT_SINGLE_OWNER
262 if (maxResources) {
263 *maxResources = -1;
264 }
265 if (maxResourceBytes) {
266 *maxResourceBytes = this->getResourceCacheLimit();
267 }
268}
269
270size_t GrDirectContext::getResourceCacheLimit() const {
271 ASSERT_SINGLE_OWNER
272 return fResourceCache->getMaxResourceBytes();
273}
274
275void GrDirectContext::setResourceCacheLimits(int unused, size_t maxResourceBytes) {
276 ASSERT_SINGLE_OWNER
277 this->setResourceCacheLimit(maxResourceBytes);
278}
279
280void GrDirectContext::setResourceCacheLimit(size_t maxResourceBytes) {
281 ASSERT_SINGLE_OWNER
282 fResourceCache->setLimit(maxResourceBytes);
283}
284
// Purges unlocked (unreferenced) resources; when scratchResourcesOnly is true
// only scratch resources are considered.
void GrDirectContext::purgeUnlockedResources(bool scratchResourcesOnly) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(scratchResourcesOnly);
    fResourceCache->purgeAsNeeded();

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
299
// Processes completed async work and frees resources unused for at least
// |msNotUsed|, including stale CCPR cache entries and text blobs.
void GrDirectContext::performDeferredCleanup(std::chrono::milliseconds msNotUsed) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    // Fire finished procs and return client-mapped buffers before purging.
    this->checkAsyncWorkCompletion();
    fMappedBufferManager->process();
    auto purgeTime = GrStdSteadyClock::now() - msNotUsed;

    fResourceCache->purgeAsNeeded();
    fResourceCache->purgeResourcesNotUsedSince(purgeTime);

    if (auto ccpr = this->drawingManager()->getCoverageCountingPathRenderer()) {
        ccpr->purgeCacheEntriesOlderThan(this->proxyProvider(), purgeTime);
    }

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
324
// Purges unlocked resources until roughly |bytesToPurge| bytes are freed,
// preferring scratch resources when requested.
void GrDirectContext::purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(bytesToPurge, preferScratchResources);
}
334
Adlai Holler3acc69a2020-10-13 08:20:51 -0400335////////////////////////////////////////////////////////////////////////////////
336bool GrDirectContext::wait(int numSemaphores, const GrBackendSemaphore waitSemaphores[],
337 bool deleteSemaphoresAfterWait) {
338 if (!fGpu || fGpu->caps()->semaphoreSupport()) {
339 return false;
340 }
341 GrWrapOwnership ownership =
342 deleteSemaphoresAfterWait ? kAdopt_GrWrapOwnership : kBorrow_GrWrapOwnership;
343 for (int i = 0; i < numSemaphores; ++i) {
344 std::unique_ptr<GrSemaphore> sema = fResourceProvider->wrapBackendSemaphore(
345 waitSemaphores[i], GrResourceProvider::SemaphoreWrapType::kWillWait, ownership);
346 // If we failed to wrap the semaphore it means the client didn't give us a valid semaphore
347 // to begin with. Therefore, it is fine to not wait on it.
348 if (sema) {
349 fGpu->waitSemaphore(sema.get());
350 }
351 }
352 return true;
353}
Adlai Holler4aa4c602020-10-12 13:58:52 -0400354
// Lazily creates the small-path atlas manager (registering it for pre-flush
// callbacks) and (re)initializes its atlas. Returns nullptr if the atlas
// cannot be initialized.
GrSmallPathAtlasMgr* GrDirectContext::onGetSmallPathAtlasMgr() {
    if (!fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr = std::make_unique<GrSmallPathAtlasMgr>();

        this->priv().addOnFlushCallbackObject(fSmallPathAtlasMgr.get());
    }

    if (!fSmallPathAtlasMgr->initAtlas(this->proxyProvider(), this->caps())) {
        return nullptr;
    }

    return fSmallPathAtlasMgr.get();
}
368
////////////////////////////////////////////////////////////////////////////////

// Flushes all pending work to the GPU command stream. On an abandoned context
// the client's finished/submitted procs are still invoked (submitted=false)
// so per-flush client resources can be reclaimed. Returns kYes only when
// requested semaphores were actually signaled.
GrSemaphoresSubmitted GrDirectContext::flush(const GrFlushInfo& info) {
    ASSERT_SINGLE_OWNER
    if (this->abandoned()) {
        if (info.fFinishedProc) {
            info.fFinishedProc(info.fFinishedContext);
        }
        if (info.fSubmittedProc) {
            info.fSubmittedProc(info.fSubmittedContext, false);
        }
        return GrSemaphoresSubmitted::kNo;
    }

    bool flushed = this->drawingManager()->flush(
            nullptr, 0, SkSurface::BackendSurfaceAccess::kNoAccess, info, nullptr);

    // Semaphores requested on a backend without support also report kNo.
    if (!flushed || (!this->priv().caps()->semaphoreSupport() && info.fNumSemaphores)) {
        return GrSemaphoresSubmitted::kNo;
    }
    return GrSemaphoresSubmitted::kYes;
}
391
392bool GrDirectContext::submit(bool syncCpu) {
393 ASSERT_SINGLE_OWNER
394 if (this->abandoned()) {
395 return false;
396 }
397
398 if (!fGpu) {
399 return false;
400 }
401
402 return fGpu->submitToGpu(syncCpu);
403}
404
405////////////////////////////////////////////////////////////////////////////////
406
407void GrDirectContext::checkAsyncWorkCompletion() {
408 if (fGpu) {
409 fGpu->checkFinishProcs();
410 }
411}
412
413////////////////////////////////////////////////////////////////////////////////
414
415void GrDirectContext::storeVkPipelineCacheData() {
416 if (fGpu) {
417 fGpu->storeVkPipelineCacheData();
418 }
419}
420
421////////////////////////////////////////////////////////////////////////////////
422
423bool GrDirectContext::supportsDistanceFieldText() const {
424 return this->caps()->shaderCaps()->supportsDistanceFieldText();
425}
426
//////////////////////////////////////////////////////////////////////////////

// Enumerates resource-cache contents (plus the text blob cache's byte size)
// into the client-supplied SkTraceMemoryDump.
void GrDirectContext::dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const {
    ASSERT_SINGLE_OWNER
    fResourceCache->dumpMemoryStatistics(traceMemoryDump);
    traceMemoryDump->dumpNumericValue("skia/gr_text_blob_cache", "size", "bytes",
                                      this->getTextBlobCache()->usedBytes());
}
435
// Estimates the GPU memory footprint of a texture-backed image. Returns 0 for
// raster images or when the image's texture proxy is gone.
size_t GrDirectContext::ComputeImageSize(sk_sp<SkImage> image, GrMipmapped mipMapped,
                                         bool useNextPow2) {
    if (!image->isTextureBacked()) {
        return 0;
    }
    // Safe downcast: isTextureBacked() guarantees a GPU-backed image here.
    SkImage_GpuBase* gpuImage = static_cast<SkImage_GpuBase*>(as_IB(image.get()));
    GrTextureProxy* proxy = gpuImage->peekProxy();
    if (!proxy) {
        return 0;
    }

    // Size the base color buffer only (one color sample per pixel).
    int colorSamplesPerPixel = 1;
    return GrSurface::ComputeSize(proxy->backendFormat(), image->dimensions(),
                                  colorSamplesPerPixel, mipMapped, useNextPow2);
}
451
Adlai Holler98dd0042020-10-13 10:04:00 -0400452GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
453 const GrBackendFormat& backendFormat,
454 GrMipmapped mipMapped,
455 GrRenderable renderable,
456 GrProtected isProtected) {
457 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
458 if (this->abandoned()) {
459 return GrBackendTexture();
460 }
461
462 return fGpu->createBackendTexture({width, height}, backendFormat, renderable,
463 mipMapped, isProtected);
464}
465
466GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
467 SkColorType skColorType,
468 GrMipmapped mipMapped,
469 GrRenderable renderable,
470 GrProtected isProtected) {
471 if (this->abandoned()) {
472 return GrBackendTexture();
473 }
474
475 const GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
476
477 return this->createBackendTexture(width, height, format, mipMapped, renderable, isProtected);
478}
479
480static GrBackendTexture create_and_update_backend_texture(
481 GrDirectContext* dContext,
482 SkISize dimensions,
483 const GrBackendFormat& backendFormat,
484 GrMipmapped mipMapped,
485 GrRenderable renderable,
486 GrProtected isProtected,
487 sk_sp<GrRefCntedCallback> finishedCallback,
488 const GrGpu::BackendTextureData* data) {
489 GrGpu* gpu = dContext->priv().getGpu();
490
491 GrBackendTexture beTex = gpu->createBackendTexture(dimensions, backendFormat, renderable,
492 mipMapped, isProtected);
493 if (!beTex.isValid()) {
494 return {};
495 }
496
497 if (!dContext->priv().getGpu()->updateBackendTexture(beTex,
498 std::move(finishedCallback),
499 data)) {
500 dContext->deleteBackendTexture(beTex);
501 return {};
502 }
503 return beTex;
504}
505
// Creates a backend texture cleared to |color|; finishedProc fires when the
// GPU upload completes.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    // NOTE(review): the callback is wrapped before the abandoned check —
    // presumably so the proc still fires (via ref-count drop) on early-out;
    // confirm GrRefCntedCallback's destruction semantics before reordering.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_backend_texture(this, {width, height},
                                             backendFormat, mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
526
// Creates a backend texture cleared to |color|, deriving the format from
// skColorType. The color is pre-swizzled into the format's write order.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    // Wrap the proc up-front so it is managed on every exit path.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
    if (!format.isValid()) {
        return {};
    }

    GrColorType grColorType = SkColorTypeToGrColorType(skColorType);
    SkColor4f swizzledColor = this->caps()->getWriteSwizzle(format, grColorType).applyTo(color);

    GrGpu::BackendTextureData data(swizzledColor);
    return create_and_update_backend_texture(this, {width, height}, format,
                                             mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
554
// Creates a backend texture initialized from the given pixmap level(s).
// numProvidedLevels must be 1 (no mips) or exactly the full mip-chain count
// implied by level 0's dimensions; anything else fails.
GrBackendTexture GrDirectContext::createBackendTexture(const SkPixmap srcData[],
                                                       int numProvidedLevels,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    // Wrap the proc up-front so it is managed on every exit path.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    if (!srcData || numProvidedLevels <= 0) {
        return {};
    }

    // Level 0 defines the texture's dimensions and color type.
    int baseWidth = srcData[0].width();
    int baseHeight = srcData[0].height();
    SkColorType colorType = srcData[0].colorType();

    GrMipmapped mipMapped = GrMipmapped::kNo;
    int numExpectedLevels = 1;
    if (numProvidedLevels > 1) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(baseWidth, baseHeight) + 1;
        mipMapped = GrMipmapped::kYes;
    }

    if (numProvidedLevels != numExpectedLevels) {
        return {};
    }

    GrBackendFormat backendFormat = this->defaultBackendFormat(colorType, renderable);

    GrGpu::BackendTextureData data(srcData);
    return create_and_update_backend_texture(this, {baseWidth, baseHeight},
                                             backendFormat, mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
595
// Overwrites an existing backend texture's contents with |color|;
// finishedProc fires when the GPU write completes.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    // Wrap the proc up-front so it is managed on every exit path.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrGpu::BackendTextureData data(color);
    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
609
// Overwrites an existing backend texture with |color|, interpreting the
// texture's format via skColorType and swizzling the color to match. Fails if
// the color type and format are incompatible.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           SkColorType skColorType,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    // Wrap the proc up-front so it is managed on every exit path.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrBackendFormat format = backendTexture.getBackendFormat();
    GrColorType grColorType = SkColorTypeAndFormatToGrColorType(this->caps(), skColorType, format);

    if (!this->caps()->areColorTypeAndFormatCompatible(grColorType, format)) {
        return false;
    }

    GrSwizzle swizzle = this->caps()->getWriteSwizzle(format, grColorType);
    GrGpu::BackendTextureData data(swizzle.applyTo(color));

    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
633
// Overwrites an existing backend texture from pixmap level(s). The number of
// provided levels must match the texture's mip state exactly: 1 when the
// texture has no mips, the full chain count when it does.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkPixmap srcData[],
                                           int numLevels,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    // Wrap the proc up-front so it is managed on every exit path.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    if (!srcData || numLevels <= 0) {
        return false;
    }

    int numExpectedLevels = 1;
    if (backendTexture.hasMipmaps()) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(backendTexture.width(),
                                                        backendTexture.height()) + 1;
    }
    if (numLevels != numExpectedLevels) {
        return false;
    }

    GrGpu::BackendTextureData data(srcData);
    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
661
Adlai Holler64e13832020-10-13 08:21:56 -0400662//////////////////////////////////////////////////////////////////////////////
663
664static GrBackendTexture create_and_update_compressed_backend_texture(
665 GrDirectContext* dContext,
666 SkISize dimensions,
667 const GrBackendFormat& backendFormat,
668 GrMipmapped mipMapped,
669 GrProtected isProtected,
670 sk_sp<GrRefCntedCallback> finishedCallback,
671 const GrGpu::BackendTextureData* data) {
672 GrGpu* gpu = dContext->priv().getGpu();
673
674 GrBackendTexture beTex = gpu->createCompressedBackendTexture(dimensions, backendFormat,
675 mipMapped, isProtected);
676 if (!beTex.isValid()) {
677 return {};
678 }
679
680 if (!dContext->priv().getGpu()->updateCompressedBackendTexture(
681 beTex, std::move(finishedCallback), data)) {
682 dContext->deleteBackendTexture(beTex);
683 return {};
684 }
685 return beTex;
686}
687
// Creates a compressed backend texture cleared to |color|; finishedProc fires
// when the GPU upload completes.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const SkColor4f& color,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    // Wrap the proc up-front so it is managed on every exit path.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}
707
708GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
709 SkImage::CompressionType compression,
710 const SkColor4f& color,
711 GrMipmapped mipMapped,
712 GrProtected isProtected,
713 GrGpuFinishedProc finishedProc,
714 GrGpuFinishedContext finishedContext) {
715 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
716 GrBackendFormat format = this->compressedBackendFormat(compression);
717 return this->createCompressedBackendTexture(width, height, format, color,
718 mipMapped, isProtected, finishedProc,
719 finishedContext);
720}
721
// Creates a compressed backend texture initialized from raw compressed data
// of |dataSize| bytes.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const void* compressedData,
                                                                 size_t dataSize,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    // Wrap the proc up-front so it is managed on every exit path.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(compressedData, dataSize);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}
742
743GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
744 SkImage::CompressionType compression,
745 const void* data, size_t dataSize,
746 GrMipmapped mipMapped,
747 GrProtected isProtected,
748 GrGpuFinishedProc finishedProc,
749 GrGpuFinishedContext finishedContext) {
750 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
751 GrBackendFormat format = this->compressedBackendFormat(compression);
752 return this->createCompressedBackendTexture(width, height, format, data, dataSize, mipMapped,
753 isProtected, finishedProc, finishedContext);
754}
755
// Overwrites an existing compressed backend texture's contents with |color|.
bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
                                                     const SkColor4f& color,
                                                     GrGpuFinishedProc finishedProc,
                                                     GrGpuFinishedContext finishedContext) {
    // Wrap the proc up-front so it is managed on every exit path.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrGpu::BackendTextureData data(color);
    return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
769
// Overwrites an existing compressed backend texture from raw compressed data;
// fails on a null data pointer.
bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
                                                     const void* compressedData,
                                                     size_t dataSize,
                                                     GrGpuFinishedProc finishedProc,
                                                     GrGpuFinishedContext finishedContext) {
    // Wrap the proc up-front so it is managed on every exit path.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    if (!compressedData) {
        return false;
    }

    GrGpu::BackendTextureData data(compressedData, dataSize);

    return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
789
//////////////////////////////////////////////////////////////////////////////

// Transitions a backend texture to |state| (e.g. image layout / queue family
// on Vulkan). If previousState is non-null it receives the state prior to the
// transition. finishedProc fires when the GPU transition completes.
bool GrDirectContext::setBackendTextureState(const GrBackendTexture& backendTexture,
                                             const GrBackendSurfaceMutableState& state,
                                             GrBackendSurfaceMutableState* previousState,
                                             GrGpuFinishedProc finishedProc,
                                             GrGpuFinishedContext finishedContext) {
    // Wrap the proc up-front so it is managed on every exit path.
    auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    return fGpu->setBackendTextureState(backendTexture, state, previousState, std::move(callback));
}
805
806
// Render-target counterpart of setBackendTextureState(): transitions the
// backend render target to |state|, optionally reporting the prior state.
bool GrDirectContext::setBackendRenderTargetState(const GrBackendRenderTarget& backendRenderTarget,
                                                  const GrBackendSurfaceMutableState& state,
                                                  GrBackendSurfaceMutableState* previousState,
                                                  GrGpuFinishedProc finishedProc,
                                                  GrGpuFinishedContext finishedContext) {
    // Wrap the proc up-front so it is managed on every exit path.
    auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    return fGpu->setBackendRenderTargetState(backendRenderTarget, state, previousState,
                                             std::move(callback));
}
821
822void GrDirectContext::deleteBackendTexture(GrBackendTexture backendTex) {
823 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
824 // For the Vulkan backend we still must destroy the backend texture when the context is
825 // abandoned.
826 if ((this->abandoned() && this->backend() != GrBackendApi::kVulkan) || !backendTex.isValid()) {
827 return;
828 }
829
830 fGpu->deleteBackendTexture(backendTex);
831}
832
//////////////////////////////////////////////////////////////////////////////
834
835bool GrDirectContext::precompileShader(const SkData& key, const SkData& data) {
836 return fGpu->precompileShader(key, data);
837}
838
839#ifdef SK_ENABLE_DUMP_GPU
840#include "include/core/SkString.h"
841#include "src/utils/SkJSONWriter.h"
842SkString GrDirectContext::dump() const {
843 SkDynamicMemoryWStream stream;
844 SkJSONWriter writer(&stream, SkJSONWriter::Mode::kPretty);
845 writer.beginObject();
846
847 writer.appendString("backend", GrBackendApiToStr(this->backend()));
848
849 writer.appendName("caps");
850 this->caps()->dumpJSON(&writer);
851
852 writer.appendName("gpu");
853 this->fGpu->dumpJSON(&writer);
854
855 writer.appendName("context");
856 this->dumpJSON(&writer);
857
858 // Flush JSON to the memory stream
859 writer.endObject();
860 writer.flush();
861
862 // Null terminate the JSON data in the memory stream
863 stream.write8(0);
864
865 // Allocate a string big enough to hold all the data, then copy out of the stream
866 SkString result(stream.bytesWritten());
867 stream.copyToAndReset(result.writable_str());
868 return result;
869}
870#endif
871
John Rosascoa9b348f2019-11-08 13:18:15 -0800872#ifdef SK_GL
Robert Phillipsc7228c62020-07-14 12:57:39 -0400873
Robert Phillipsf4f80112020-07-13 16:13:31 -0400874/*************************************************************************************************/
875sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500876 GrContextOptions defaultOptions;
Jim Van Verth03b8ab22020-02-24 11:36:15 -0500877 return MakeGL(std::move(glInterface), defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -0500878}
879
Robert Phillipsf4f80112020-07-13 16:13:31 -0400880sk_sp<GrDirectContext> GrDirectContext::MakeGL(const GrContextOptions& options) {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400881 return MakeGL(nullptr, options);
882}
883
Robert Phillipsf4f80112020-07-13 16:13:31 -0400884sk_sp<GrDirectContext> GrDirectContext::MakeGL() {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400885 GrContextOptions defaultOptions;
886 return MakeGL(nullptr, defaultOptions);
887}
888
Brian Salomon24069eb2020-06-24 10:19:52 -0400889#if GR_TEST_UTILS
// Test-only (GR_TEST_UTILS): wraps an existing glGetError implementation so that calls
// which would report GR_GL_NO_ERROR instead report GR_GL_OUT_OF_MEMORY roughly once in
// every 300 invocations, exercising the GL backend's OOM-handling paths.
GrGLFunction<GrGLGetErrorFn> make_get_error_with_random_oom(GrGLFunction<GrGLGetErrorFn> original) {
    // A SkRandom and a GrGLFunction<GrGLGetErrorFn> are too big to be captured by a
    // GrGLFunction<GrGLGetError> (surprise, surprise). So we make a context object and
    // capture that by pointer. However, GrGLFunction doesn't support calling a destructor
    // on the thing it captures. So we leak the context.
    struct GetErrorContext {
        SkRandom fRandom;                        // drives the 1-in-300 synthetic OOM
        GrGLFunction<GrGLGetErrorFn> fGetError;  // the real glGetError being wrapped
    };

    auto errorContext = new GetErrorContext;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
    // The context is leaked on purpose (see above); tell LeakSanitizer not to report it.
    __lsan_ignore_object(errorContext);
#endif

    errorContext->fGetError = original;

    return GrGLFunction<GrGLGetErrorFn>([errorContext]() {
        // Only inject an OOM when the real call reported no error, so genuine GL
        // errors are never masked.
        GrGLenum error = errorContext->fGetError();
        if (error == GR_GL_NO_ERROR && (errorContext->fRandom.nextU() % 300) == 0) {
            error = GR_GL_OUT_OF_MEMORY;
        }
        return error;
    });
}
916#endif
917
Robert Phillipsf4f80112020-07-13 16:13:31 -0400918sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface,
919 const GrContextOptions& options) {
920 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kOpenGL, options));
Brian Salomon24069eb2020-06-24 10:19:52 -0400921#if GR_TEST_UTILS
922 if (options.fRandomGLOOM) {
923 auto copy = sk_make_sp<GrGLInterface>(*glInterface);
924 copy->fFunctions.fGetError =
925 make_get_error_with_random_oom(glInterface->fFunctions.fGetError);
926#if GR_GL_CHECK_ERROR
927 // Suppress logging GL errors since we'll be synthetically generating them.
928 copy->suppressErrorLogging();
929#endif
930 glInterface = std::move(copy);
931 }
932#endif
Robert Phillipsf4f80112020-07-13 16:13:31 -0400933 direct->fGpu = GrGLGpu::Make(std::move(glInterface), options, direct.get());
934 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500935 return nullptr;
936 }
Robert Phillipsf4f80112020-07-13 16:13:31 -0400937 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -0500938}
John Rosascoa9b348f2019-11-08 13:18:15 -0800939#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -0500940
Robert Phillipsf4f80112020-07-13 16:13:31 -0400941/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -0400942sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions) {
943 GrContextOptions defaultOptions;
944 return MakeMock(mockOptions, defaultOptions);
945}
946
947sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions,
948 const GrContextOptions& options) {
949 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMock, options));
950
951 direct->fGpu = GrMockGpu::Make(mockOptions, options, direct.get());
952 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500953 return nullptr;
954 }
Chris Daltona378b452019-12-11 13:24:11 -0500955
Robert Phillipsf4f80112020-07-13 16:13:31 -0400956 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -0500957}
958
Greg Danielb4d89562018-10-03 18:44:49 +0000959#ifdef SK_VULKAN
Robert Phillipsf4f80112020-07-13 16:13:31 -0400960/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -0400961sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext) {
962 GrContextOptions defaultOptions;
963 return MakeVulkan(backendContext, defaultOptions);
964}
965
966sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext,
967 const GrContextOptions& options) {
968 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kVulkan, options));
969
970 direct->fGpu = GrVkGpu::Make(backendContext, options, direct.get());
971 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500972 return nullptr;
973 }
974
Robert Phillipsf4f80112020-07-13 16:13:31 -0400975 return direct;
Greg Danielb4d89562018-10-03 18:44:49 +0000976}
Robert Phillipsf4f80112020-07-13 16:13:31 -0400977#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -0500978
979#ifdef SK_METAL
Robert Phillipsf4f80112020-07-13 16:13:31 -0400980/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -0400981sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500982 GrContextOptions defaultOptions;
983 return MakeMetal(device, queue, defaultOptions);
984}
985
Robert Phillipsf4f80112020-07-13 16:13:31 -0400986sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue,
987 const GrContextOptions& options) {
988 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
Robert Phillipsa3457b82018-03-08 11:30:12 -0500989
Robert Phillipsf4f80112020-07-13 16:13:31 -0400990 direct->fGpu = GrMtlTrampoline::MakeGpu(direct.get(), options, device, queue);
991 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500992 return nullptr;
993 }
Timothy Liang4e85e802018-06-28 16:37:18 -0400994
Robert Phillipsf4f80112020-07-13 16:13:31 -0400995 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -0500996}
997#endif
998
Jim Van Verthb01e12b2020-02-18 14:34:38 -0500999#ifdef SK_DIRECT3D
Robert Phillipsf4f80112020-07-13 16:13:31 -04001000/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001001sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext) {
1002 GrContextOptions defaultOptions;
1003 return MakeDirect3D(backendContext, defaultOptions);
1004}
1005
1006sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext,
1007 const GrContextOptions& options) {
1008 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDirect3D, options));
1009
1010 direct->fGpu = GrD3DGpu::Make(backendContext, options, direct.get());
1011 if (!direct->init()) {
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -05001012 return nullptr;
1013 }
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001014
Robert Phillipsf4f80112020-07-13 16:13:31 -04001015 return direct;
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001016}
1017#endif
1018
Stephen White985741a2019-07-18 11:43:45 -04001019#ifdef SK_DAWN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001020/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001021sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device) {
Stephen White985741a2019-07-18 11:43:45 -04001022 GrContextOptions defaultOptions;
1023 return MakeDawn(device, defaultOptions);
1024}
1025
Robert Phillipsf4f80112020-07-13 16:13:31 -04001026sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device,
1027 const GrContextOptions& options) {
1028 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDawn, options));
Stephen White985741a2019-07-18 11:43:45 -04001029
Robert Phillipsf4f80112020-07-13 16:13:31 -04001030 direct->fGpu = GrDawnGpu::Make(device, options, direct.get());
1031 if (!direct->init()) {
Stephen White985741a2019-07-18 11:43:45 -04001032 return nullptr;
1033 }
1034
Robert Phillipsf4f80112020-07-13 16:13:31 -04001035 return direct;
Stephen White985741a2019-07-18 11:43:45 -04001036}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001037
Stephen White985741a2019-07-18 11:43:45 -04001038#endif