blob: cbd9d3b1de04ad6745cfb7ac924fe255bbf6b965 [file] [log] [blame]
Robert Phillipsa3457b82018-03-08 11:30:12 -05001/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Greg Daniel54bfb182018-11-20 17:12:36 -05008
Robert Phillipsb7bfbc22020-07-01 12:55:01 -04009#include "include/gpu/GrDirectContext.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050010
Adlai Holler3acc69a2020-10-13 08:20:51 -040011#include "include/core/SkTraceMemoryDump.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050012#include "include/gpu/GrContextThreadSafeProxy.h"
Adlai Holler9555f292020-10-09 09:41:14 -040013#include "src/core/SkTaskGroup.h"
14#include "src/gpu/GrClientMappedBufferManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050015#include "src/gpu/GrContextThreadSafeProxyPriv.h"
Adlai Hollera0693042020-10-14 11:23:11 -040016#include "src/gpu/GrDirectContextPriv.h"
Adlai Holler4aa4c602020-10-12 13:58:52 -040017#include "src/gpu/GrDrawingManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/gpu/GrGpu.h"
Adlai Holler9555f292020-10-09 09:41:14 -040019#include "src/gpu/GrResourceProvider.h"
20#include "src/gpu/GrShaderUtils.h"
Brian Salomonea1d39b2021-04-01 17:06:52 -040021#include "src/gpu/GrSurfaceContext.h"
Adlai Holler4aa4c602020-10-12 13:58:52 -040022#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050023#include "src/gpu/effects/GrSkSLFP.h"
24#include "src/gpu/gl/GrGLGpu.h"
25#include "src/gpu/mock/GrMockGpu.h"
Robert Phillips5edf5102020-08-10 16:30:36 -040026#include "src/gpu/ops/GrSmallPathAtlasMgr.h"
Robert Phillipse19babf2020-04-06 13:57:30 -040027#include "src/gpu/text/GrAtlasManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050028#include "src/gpu/text/GrStrikeCache.h"
Brian Salomonea1d39b2021-04-01 17:06:52 -040029#include "src/image/SkImage_GpuBase.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050030#ifdef SK_METAL
Jim Van Verth351c9b52020-11-12 15:21:11 -050031#include "include/gpu/mtl/GrMtlBackendContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050032#include "src/gpu/mtl/GrMtlTrampoline.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050033#endif
34#ifdef SK_VULKAN
Mike Kleinc0bd9f92019-04-23 12:05:21 -050035#include "src/gpu/vk/GrVkGpu.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050036#endif
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -050037#ifdef SK_DIRECT3D
38#include "src/gpu/d3d/GrD3DGpu.h"
39#endif
Stephen White985741a2019-07-18 11:43:45 -040040#ifdef SK_DAWN
Mike Klein52337de2019-07-25 09:00:52 -050041#include "src/gpu/dawn/GrDawnGpu.h"
Stephen White985741a2019-07-18 11:43:45 -040042#endif
Adlai Holler6d0745b2020-10-13 13:29:00 -040043#include <memory>
Robert Phillipsa3457b82018-03-08 11:30:12 -050044
Brian Salomon24069eb2020-06-24 10:19:52 -040045#if GR_TEST_UTILS
46# include "include/utils/SkRandom.h"
47# if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
48# include <sanitizer/lsan_interface.h>
49# endif
50#endif
51
Adlai Holler9555f292020-10-09 09:41:14 -040052#define ASSERT_SINGLE_OWNER GR_ASSERT_SINGLE_OWNER(this->singleOwner())
53
Robert Phillipse7a959d2021-03-11 14:44:42 -050054GrDirectContext::DirectContextID GrDirectContext::DirectContextID::Next() {
Robert Phillipsedff4672021-03-11 09:16:25 -050055 static std::atomic<uint32_t> nextID{1};
56 uint32_t id;
57 do {
58 id = nextID.fetch_add(1, std::memory_order_relaxed);
59 } while (id == SK_InvalidUniqueID);
60 return DirectContextID(id);
61}
62
Robert Phillipsad248452020-06-30 09:27:52 -040063GrDirectContext::GrDirectContext(GrBackendApi backend, const GrContextOptions& options)
Robert Phillips23070582021-03-31 17:04:48 -040064 : INHERITED(GrContextThreadSafeProxyPriv::Make(backend, options), false)
Robert Phillipse7a959d2021-03-11 14:44:42 -050065 , fDirectContextID(DirectContextID::Next()) {
Robert Phillipsad248452020-06-30 09:27:52 -040066}
Robert Phillipsa3457b82018-03-08 11:30:12 -050067
Robert Phillipsad248452020-06-30 09:27:52 -040068GrDirectContext::~GrDirectContext() {
Adlai Holler9555f292020-10-09 09:41:14 -040069 ASSERT_SINGLE_OWNER
Robert Phillipsad248452020-06-30 09:27:52 -040070 // this if-test protects against the case where the context is being destroyed
71 // before having been fully created
Adlai Holler9555f292020-10-09 09:41:14 -040072 if (fGpu) {
Greg Daniel0a2464f2020-05-14 15:45:44 -040073 this->flushAndSubmit();
Robert Phillipsa3457b82018-03-08 11:30:12 -050074 }
Adlai Holler9555f292020-10-09 09:41:14 -040075
Greg Daniela89b4302021-01-29 10:48:40 -050076 // We need to make sure all work is finished on the gpu before we start releasing resources.
77 this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/false);
78
Adlai Holler9555f292020-10-09 09:41:14 -040079 this->destroyDrawingManager();
Adlai Holler9555f292020-10-09 09:41:14 -040080
81 // Ideally we could just let the ptr drop, but resource cache queries this ptr in releaseAll.
82 if (fResourceCache) {
83 fResourceCache->releaseAll();
84 }
Brian Salomon91a88f02021-02-04 15:34:32 -050085 // This has to be after GrResourceCache::releaseAll so that other threads that are holding
86 // async pixel result don't try to destroy buffers off thread.
87 fMappedBufferManager.reset();
Robert Phillipsad248452020-06-30 09:27:52 -040088}
Robert Phillipsa3457b82018-03-08 11:30:12 -050089
Adlai Holler61a591c2020-10-12 12:38:33 -040090sk_sp<GrContextThreadSafeProxy> GrDirectContext::threadSafeProxy() {
91 return INHERITED::threadSafeProxy();
92}
93
Adlai Hollera7a40442020-10-09 09:49:42 -040094void GrDirectContext::resetGLTextureBindings() {
95 if (this->abandoned() || this->backend() != GrBackendApi::kOpenGL) {
96 return;
97 }
98 fGpu->resetTextureBindings();
99}
100
101void GrDirectContext::resetContext(uint32_t state) {
102 ASSERT_SINGLE_OWNER
103 fGpu->markContextDirty(state);
104}
105
Robert Phillipsad248452020-06-30 09:27:52 -0400106void GrDirectContext::abandonContext() {
Adlai Hollera7a40442020-10-09 09:49:42 -0400107 if (INHERITED::abandoned()) {
108 return;
109 }
110
Robert Phillipsad248452020-06-30 09:27:52 -0400111 INHERITED::abandonContext();
Adlai Hollera7a40442020-10-09 09:49:42 -0400112
Greg Daniela89b4302021-01-29 10:48:40 -0500113 // We need to make sure all work is finished on the gpu before we start releasing resources.
114 this->syncAllOutstandingGpuWork(this->caps()->mustSyncGpuDuringAbandon());
115
Adlai Hollera7a40442020-10-09 09:49:42 -0400116 fStrikeCache->freeAll();
117
118 fMappedBufferManager->abandon();
119
120 fResourceProvider->abandon();
121
Robert Phillipseb999bc2020-11-03 08:41:47 -0500122 // abandon first so destructors don't try to free the resources in the API.
Adlai Hollera7a40442020-10-09 09:49:42 -0400123 fResourceCache->abandonAll();
124
125 fGpu->disconnect(GrGpu::DisconnectType::kAbandon);
126
Brian Salomon91a88f02021-02-04 15:34:32 -0500127 // Must be after GrResourceCache::abandonAll().
Adlai Hollera7a40442020-10-09 09:49:42 -0400128 fMappedBufferManager.reset();
Brian Salomon91a88f02021-02-04 15:34:32 -0500129
Robert Phillips079455c2020-08-11 15:18:46 -0400130 if (fSmallPathAtlasMgr) {
131 fSmallPathAtlasMgr->reset();
132 }
Robert Phillipsad248452020-06-30 09:27:52 -0400133 fAtlasManager->freeAll();
134}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500135
Adlai Hollera7a40442020-10-09 09:49:42 -0400136bool GrDirectContext::abandoned() {
137 if (INHERITED::abandoned()) {
138 return true;
139 }
140
141 if (fGpu && fGpu->isDeviceLost()) {
142 this->abandonContext();
143 return true;
144 }
145 return false;
146}
147
Adlai Holler61a591c2020-10-12 12:38:33 -0400148bool GrDirectContext::oomed() { return fGpu ? fGpu->checkAndResetOOMed() : false; }
149
Robert Phillipsad248452020-06-30 09:27:52 -0400150void GrDirectContext::releaseResourcesAndAbandonContext() {
Adlai Holler61a591c2020-10-12 12:38:33 -0400151 if (INHERITED::abandoned()) {
152 return;
153 }
154
155 INHERITED::abandonContext();
156
Greg Daniela89b4302021-01-29 10:48:40 -0500157 // We need to make sure all work is finished on the gpu before we start releasing resources.
158 this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/true);
159
Adlai Holler61a591c2020-10-12 12:38:33 -0400160 fResourceProvider->abandon();
161
162 // Release all resources in the backend 3D API.
163 fResourceCache->releaseAll();
164
Brian Salomon91a88f02021-02-04 15:34:32 -0500165 // Must be after GrResourceCache::releaseAll().
166 fMappedBufferManager.reset();
167
Adlai Holler61a591c2020-10-12 12:38:33 -0400168 fGpu->disconnect(GrGpu::DisconnectType::kCleanup);
Robert Phillips079455c2020-08-11 15:18:46 -0400169 if (fSmallPathAtlasMgr) {
170 fSmallPathAtlasMgr->reset();
171 }
Robert Phillipsad248452020-06-30 09:27:52 -0400172 fAtlasManager->freeAll();
173}
Robert Phillips6db27c22019-05-01 10:43:56 -0400174
Robert Phillipsad248452020-06-30 09:27:52 -0400175void GrDirectContext::freeGpuResources() {
Adlai Holler4aa4c602020-10-12 13:58:52 -0400176 ASSERT_SINGLE_OWNER
177
178 if (this->abandoned()) {
179 return;
180 }
181
Robert Phillipsad248452020-06-30 09:27:52 -0400182 this->flushAndSubmit();
Robert Phillips079455c2020-08-11 15:18:46 -0400183 if (fSmallPathAtlasMgr) {
184 fSmallPathAtlasMgr->reset();
185 }
Robert Phillipsad248452020-06-30 09:27:52 -0400186 fAtlasManager->freeAll();
Robert Phillips56181ba2019-03-08 12:00:45 -0500187
Adlai Holler4aa4c602020-10-12 13:58:52 -0400188 // TODO: the glyph cache doesn't hold any GpuResources so this call should not be needed here.
189 // Some slack in the GrTextBlob's implementation requires it though. That could be fixed.
190 fStrikeCache->freeAll();
191
192 this->drawingManager()->freeGpuResources();
193
194 fResourceCache->purgeAllUnlocked();
Robert Phillipsad248452020-06-30 09:27:52 -0400195}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500196
Robert Phillipsad248452020-06-30 09:27:52 -0400197bool GrDirectContext::init() {
Adlai Holler9555f292020-10-09 09:41:14 -0400198 ASSERT_SINGLE_OWNER
199 if (!fGpu) {
Robert Phillipsad248452020-06-30 09:27:52 -0400200 return false;
Robert Phillipsa3457b82018-03-08 11:30:12 -0500201 }
202
Robert Phillipsae67c522021-03-03 11:03:38 -0500203 fThreadSafeProxy->priv().init(fGpu->refCaps(), fGpu->refPipelineBuilder());
Robert Phillipsad248452020-06-30 09:27:52 -0400204 if (!INHERITED::init()) {
205 return false;
206 }
Robert Phillipsa3457b82018-03-08 11:30:12 -0500207
Adlai Holler9555f292020-10-09 09:41:14 -0400208 SkASSERT(this->getTextBlobCache());
209 SkASSERT(this->threadSafeCache());
210
211 fStrikeCache = std::make_unique<GrStrikeCache>();
Robert Phillipsd074b622021-03-15 08:49:24 -0400212 fResourceCache = std::make_unique<GrResourceCache>(this->singleOwner(),
213 this->directContextID(),
214 this->contextID());
Adlai Holler9555f292020-10-09 09:41:14 -0400215 fResourceCache->setProxyProvider(this->proxyProvider());
216 fResourceCache->setThreadSafeCache(this->threadSafeCache());
217 fResourceProvider = std::make_unique<GrResourceProvider>(fGpu.get(), fResourceCache.get(),
218 this->singleOwner());
Robert Phillips82ad7af2021-03-11 16:00:10 -0500219 fMappedBufferManager = std::make_unique<GrClientMappedBufferManager>(this->directContextID());
Adlai Holler9555f292020-10-09 09:41:14 -0400220
221 fDidTestPMConversions = false;
222
223 // DDL TODO: we need to think through how the task group & persistent cache
224 // get passed on to/shared between all the DDLRecorders created with this context.
225 if (this->options().fExecutor) {
226 fTaskGroup = std::make_unique<SkTaskGroup>(*this->options().fExecutor);
227 }
228
229 fPersistentCache = this->options().fPersistentCache;
230 fShaderErrorHandler = this->options().fShaderErrorHandler;
231 if (!fShaderErrorHandler) {
232 fShaderErrorHandler = GrShaderUtils::DefaultShaderErrorHandler();
233 }
234
Robert Phillipsad248452020-06-30 09:27:52 -0400235 GrDrawOpAtlas::AllowMultitexturing allowMultitexturing;
236 if (GrContextOptions::Enable::kNo == this->options().fAllowMultipleGlyphCacheTextures ||
237 // multitexturing supported only if range can represent the index + texcoords fully
238 !(this->caps()->shaderCaps()->floatIs32Bits() ||
239 this->caps()->shaderCaps()->integerSupport())) {
240 allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kNo;
241 } else {
242 allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kYes;
243 }
244
245 GrProxyProvider* proxyProvider = this->priv().proxyProvider();
246
Robert Phillips3262bc82020-08-10 12:11:58 -0400247 fAtlasManager = std::make_unique<GrAtlasManager>(proxyProvider,
248 this->options().fGlyphCacheTextureMaximumBytes,
249 allowMultitexturing);
250 this->priv().addOnFlushCallbackObject(fAtlasManager.get());
Robert Phillipsad248452020-06-30 09:27:52 -0400251
252 return true;
253}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500254
Adlai Holler3a508e92020-10-12 13:58:01 -0400255void GrDirectContext::getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const {
256 ASSERT_SINGLE_OWNER
257
258 if (resourceCount) {
259 *resourceCount = fResourceCache->getBudgetedResourceCount();
260 }
261 if (resourceBytes) {
262 *resourceBytes = fResourceCache->getBudgetedResourceBytes();
263 }
264}
265
266size_t GrDirectContext::getResourceCachePurgeableBytes() const {
267 ASSERT_SINGLE_OWNER
268 return fResourceCache->getPurgeableBytes();
269}
270
271void GrDirectContext::getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const {
272 ASSERT_SINGLE_OWNER
273 if (maxResources) {
274 *maxResources = -1;
275 }
276 if (maxResourceBytes) {
277 *maxResourceBytes = this->getResourceCacheLimit();
278 }
279}
280
281size_t GrDirectContext::getResourceCacheLimit() const {
282 ASSERT_SINGLE_OWNER
283 return fResourceCache->getMaxResourceBytes();
284}
285
286void GrDirectContext::setResourceCacheLimits(int unused, size_t maxResourceBytes) {
287 ASSERT_SINGLE_OWNER
288 this->setResourceCacheLimit(maxResourceBytes);
289}
290
291void GrDirectContext::setResourceCacheLimit(size_t maxResourceBytes) {
292 ASSERT_SINGLE_OWNER
293 fResourceCache->setLimit(maxResourceBytes);
294}
295
Adlai Holler4aa4c602020-10-12 13:58:52 -0400296void GrDirectContext::purgeUnlockedResources(bool scratchResourcesOnly) {
297 ASSERT_SINGLE_OWNER
298
299 if (this->abandoned()) {
300 return;
301 }
302
303 fResourceCache->purgeUnlockedResources(scratchResourcesOnly);
304 fResourceCache->purgeAsNeeded();
305
306 // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
307 // place to purge stale blobs
308 this->getTextBlobCache()->purgeStaleBlobs();
Greg Daniel428523f2021-03-30 14:22:54 -0400309
310 fGpu->releaseUnlockedBackendObjects();
Adlai Holler4aa4c602020-10-12 13:58:52 -0400311}
312
313void GrDirectContext::performDeferredCleanup(std::chrono::milliseconds msNotUsed) {
314 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
315
316 ASSERT_SINGLE_OWNER
317
318 if (this->abandoned()) {
319 return;
320 }
321
322 this->checkAsyncWorkCompletion();
323 fMappedBufferManager->process();
324 auto purgeTime = GrStdSteadyClock::now() - msNotUsed;
325
326 fResourceCache->purgeAsNeeded();
327 fResourceCache->purgeResourcesNotUsedSince(purgeTime);
328
Adlai Holler4aa4c602020-10-12 13:58:52 -0400329 // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
330 // place to purge stale blobs
331 this->getTextBlobCache()->purgeStaleBlobs();
332}
333
334void GrDirectContext::purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources) {
335 ASSERT_SINGLE_OWNER
336
337 if (this->abandoned()) {
338 return;
339 }
340
341 fResourceCache->purgeUnlockedResources(bytesToPurge, preferScratchResources);
342}
343
Adlai Holler3acc69a2020-10-13 08:20:51 -0400344////////////////////////////////////////////////////////////////////////////////
345bool GrDirectContext::wait(int numSemaphores, const GrBackendSemaphore waitSemaphores[],
346 bool deleteSemaphoresAfterWait) {
347 if (!fGpu || fGpu->caps()->semaphoreSupport()) {
348 return false;
349 }
350 GrWrapOwnership ownership =
351 deleteSemaphoresAfterWait ? kAdopt_GrWrapOwnership : kBorrow_GrWrapOwnership;
352 for (int i = 0; i < numSemaphores; ++i) {
353 std::unique_ptr<GrSemaphore> sema = fResourceProvider->wrapBackendSemaphore(
354 waitSemaphores[i], GrResourceProvider::SemaphoreWrapType::kWillWait, ownership);
355 // If we failed to wrap the semaphore it means the client didn't give us a valid semaphore
356 // to begin with. Therefore, it is fine to not wait on it.
357 if (sema) {
358 fGpu->waitSemaphore(sema.get());
359 }
360 }
361 return true;
362}
Adlai Holler4aa4c602020-10-12 13:58:52 -0400363
Robert Phillips5edf5102020-08-10 16:30:36 -0400364GrSmallPathAtlasMgr* GrDirectContext::onGetSmallPathAtlasMgr() {
Robert Phillips079455c2020-08-11 15:18:46 -0400365 if (!fSmallPathAtlasMgr) {
366 fSmallPathAtlasMgr = std::make_unique<GrSmallPathAtlasMgr>();
367
368 this->priv().addOnFlushCallbackObject(fSmallPathAtlasMgr.get());
369 }
370
371 if (!fSmallPathAtlasMgr->initAtlas(this->proxyProvider(), this->caps())) {
372 return nullptr;
373 }
374
375 return fSmallPathAtlasMgr.get();
Robert Phillips5edf5102020-08-10 16:30:36 -0400376}
377
Adlai Holler3acc69a2020-10-13 08:20:51 -0400378////////////////////////////////////////////////////////////////////////////////
379
380GrSemaphoresSubmitted GrDirectContext::flush(const GrFlushInfo& info) {
381 ASSERT_SINGLE_OWNER
382 if (this->abandoned()) {
383 if (info.fFinishedProc) {
384 info.fFinishedProc(info.fFinishedContext);
385 }
386 if (info.fSubmittedProc) {
387 info.fSubmittedProc(info.fSubmittedContext, false);
388 }
389 return GrSemaphoresSubmitted::kNo;
390 }
391
Robert Phillips80bfda82020-11-12 09:23:36 -0500392 return this->drawingManager()->flushSurfaces({}, SkSurface::BackendSurfaceAccess::kNoAccess,
393 info, nullptr);
Adlai Holler3acc69a2020-10-13 08:20:51 -0400394}
395
396bool GrDirectContext::submit(bool syncCpu) {
397 ASSERT_SINGLE_OWNER
398 if (this->abandoned()) {
399 return false;
400 }
401
402 if (!fGpu) {
403 return false;
404 }
405
406 return fGpu->submitToGpu(syncCpu);
407}
408
409////////////////////////////////////////////////////////////////////////////////
410
411void GrDirectContext::checkAsyncWorkCompletion() {
412 if (fGpu) {
413 fGpu->checkFinishProcs();
414 }
415}
416
Greg Daniela89b4302021-01-29 10:48:40 -0500417void GrDirectContext::syncAllOutstandingGpuWork(bool shouldExecuteWhileAbandoned) {
418 if (fGpu && (!this->abandoned() || shouldExecuteWhileAbandoned)) {
419 fGpu->finishOutstandingGpuWork();
420 this->checkAsyncWorkCompletion();
421 }
422}
423
Adlai Holler3acc69a2020-10-13 08:20:51 -0400424////////////////////////////////////////////////////////////////////////////////
425
426void GrDirectContext::storeVkPipelineCacheData() {
427 if (fGpu) {
428 fGpu->storeVkPipelineCacheData();
429 }
430}
431
432////////////////////////////////////////////////////////////////////////////////
433
434bool GrDirectContext::supportsDistanceFieldText() const {
435 return this->caps()->shaderCaps()->supportsDistanceFieldText();
436}
437
438//////////////////////////////////////////////////////////////////////////////
439
440void GrDirectContext::dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const {
441 ASSERT_SINGLE_OWNER
442 fResourceCache->dumpMemoryStatistics(traceMemoryDump);
443 traceMemoryDump->dumpNumericValue("skia/gr_text_blob_cache", "size", "bytes",
444 this->getTextBlobCache()->usedBytes());
445}
446
Adlai Holler98dd0042020-10-13 10:04:00 -0400447GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
448 const GrBackendFormat& backendFormat,
449 GrMipmapped mipMapped,
450 GrRenderable renderable,
451 GrProtected isProtected) {
452 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
453 if (this->abandoned()) {
454 return GrBackendTexture();
455 }
456
457 return fGpu->createBackendTexture({width, height}, backendFormat, renderable,
458 mipMapped, isProtected);
459}
460
461GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
462 SkColorType skColorType,
463 GrMipmapped mipMapped,
464 GrRenderable renderable,
465 GrProtected isProtected) {
466 if (this->abandoned()) {
467 return GrBackendTexture();
468 }
469
470 const GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
471
472 return this->createBackendTexture(width, height, format, mipMapped, renderable, isProtected);
473}
474
475static GrBackendTexture create_and_update_backend_texture(
476 GrDirectContext* dContext,
477 SkISize dimensions,
478 const GrBackendFormat& backendFormat,
479 GrMipmapped mipMapped,
480 GrRenderable renderable,
481 GrProtected isProtected,
482 sk_sp<GrRefCntedCallback> finishedCallback,
483 const GrGpu::BackendTextureData* data) {
484 GrGpu* gpu = dContext->priv().getGpu();
Brian Salomonea1d39b2021-04-01 17:06:52 -0400485 SkASSERT(data->type() == GrGpu::BackendTextureData::Type::kColor);
Adlai Holler98dd0042020-10-13 10:04:00 -0400486 GrBackendTexture beTex = gpu->createBackendTexture(dimensions, backendFormat, renderable,
487 mipMapped, isProtected);
488 if (!beTex.isValid()) {
489 return {};
490 }
491
492 if (!dContext->priv().getGpu()->updateBackendTexture(beTex,
493 std::move(finishedCallback),
494 data)) {
495 dContext->deleteBackendTexture(beTex);
496 return {};
497 }
498 return beTex;
499}
500
Brian Salomonea1d39b2021-04-01 17:06:52 -0400501static bool update_texture_with_pixmaps(GrDirectContext* context,
502 const SkPixmap src[],
Brian Salomonb5f880a2020-12-07 11:30:16 -0500503 int numLevels,
504 const GrBackendTexture& backendTexture,
505 GrSurfaceOrigin textureOrigin,
506 sk_sp<GrRefCntedCallback> finishedCallback) {
Brian Salomonea1d39b2021-04-01 17:06:52 -0400507 GrColorType ct = SkColorTypeToGrColorType(src[0].colorType());
508 const GrBackendFormat& format = backendTexture.getBackendFormat();
Brian Salomon759217e2021-01-31 13:16:39 -0500509
Brian Salomonea1d39b2021-04-01 17:06:52 -0400510 if (!context->priv().caps()->areColorTypeAndFormatCompatible(ct, format)) {
511 return false;
512 }
513
514 auto proxy = context->priv().proxyProvider()->wrapBackendTexture(backendTexture,
515 kBorrow_GrWrapOwnership,
516 GrWrapCacheable::kNo,
517 kRW_GrIOType,
518 std::move(finishedCallback));
519 if (!proxy) {
520 return false;
521 }
522
523 GrSwizzle swizzle = context->priv().caps()->getReadSwizzle(format, ct);
524 GrSurfaceProxyView view(std::move(proxy), textureOrigin, swizzle);
525 GrSurfaceContext surfaceContext(context, std::move(view), src[0].info().colorInfo());
526 SkAutoSTArray<15, GrCPixmap> tmpSrc(numLevels);
Brian Salomon759217e2021-01-31 13:16:39 -0500527 for (int i = 0; i < numLevels; ++i) {
Brian Salomonea1d39b2021-04-01 17:06:52 -0400528 tmpSrc[i] = src[i];
529 }
530 if (!surfaceContext.writePixels(context, tmpSrc.get(), numLevels, /*prep for sampling*/ true)) {
531 return false;
Brian Salomon759217e2021-01-31 13:16:39 -0500532 }
533
Brian Salomonea1d39b2021-04-01 17:06:52 -0400534 GrSurfaceProxy* p = surfaceContext.asSurfaceProxy();
535 GrFlushInfo info;
536 context->priv().drawingManager()->flushSurfaces({&p, 1},
537 SkSurface::BackendSurfaceAccess::kNoAccess,
538 info,
539 nullptr);
540 return true;
Brian Salomonb5f880a2020-12-07 11:30:16 -0500541}
542
Adlai Holler98dd0042020-10-13 10:04:00 -0400543GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
544 const GrBackendFormat& backendFormat,
545 const SkColor4f& color,
546 GrMipmapped mipMapped,
547 GrRenderable renderable,
548 GrProtected isProtected,
549 GrGpuFinishedProc finishedProc,
550 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500551 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler98dd0042020-10-13 10:04:00 -0400552
553 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
554 if (this->abandoned()) {
555 return {};
556 }
557
558 GrGpu::BackendTextureData data(color);
559 return create_and_update_backend_texture(this, {width, height},
560 backendFormat, mipMapped, renderable, isProtected,
561 std::move(finishedCallback), &data);
562}
563
564GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
565 SkColorType skColorType,
566 const SkColor4f& color,
567 GrMipmapped mipMapped,
568 GrRenderable renderable,
569 GrProtected isProtected,
570 GrGpuFinishedProc finishedProc,
571 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500572 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler98dd0042020-10-13 10:04:00 -0400573
574 if (this->abandoned()) {
575 return {};
576 }
577
578 GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
579 if (!format.isValid()) {
580 return {};
581 }
582
583 GrColorType grColorType = SkColorTypeToGrColorType(skColorType);
584 SkColor4f swizzledColor = this->caps()->getWriteSwizzle(format, grColorType).applyTo(color);
585
586 GrGpu::BackendTextureData data(swizzledColor);
587 return create_and_update_backend_texture(this, {width, height}, format,
588 mipMapped, renderable, isProtected,
589 std::move(finishedCallback), &data);
590}
591
592GrBackendTexture GrDirectContext::createBackendTexture(const SkPixmap srcData[],
593 int numProvidedLevels,
Brian Salomonb5f880a2020-12-07 11:30:16 -0500594 GrSurfaceOrigin textureOrigin,
Adlai Holler98dd0042020-10-13 10:04:00 -0400595 GrRenderable renderable,
596 GrProtected isProtected,
597 GrGpuFinishedProc finishedProc,
598 GrGpuFinishedContext finishedContext) {
599 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
600
Brian Salomon694ff172020-11-04 16:54:28 -0500601 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler98dd0042020-10-13 10:04:00 -0400602
603 if (this->abandoned()) {
604 return {};
605 }
606
607 if (!srcData || numProvidedLevels <= 0) {
608 return {};
609 }
610
Adlai Holler98dd0042020-10-13 10:04:00 -0400611 SkColorType colorType = srcData[0].colorType();
612
613 GrMipmapped mipMapped = GrMipmapped::kNo;
Adlai Holler98dd0042020-10-13 10:04:00 -0400614 if (numProvidedLevels > 1) {
Adlai Holler98dd0042020-10-13 10:04:00 -0400615 mipMapped = GrMipmapped::kYes;
616 }
617
Adlai Holler98dd0042020-10-13 10:04:00 -0400618 GrBackendFormat backendFormat = this->defaultBackendFormat(colorType, renderable);
Brian Salomonb5f880a2020-12-07 11:30:16 -0500619 GrBackendTexture beTex = this->createBackendTexture(srcData[0].width(),
620 srcData[0].height(),
621 backendFormat,
622 mipMapped,
623 renderable,
624 isProtected);
625 if (!beTex.isValid()) {
626 return {};
627 }
Brian Salomonea1d39b2021-04-01 17:06:52 -0400628 if (!update_texture_with_pixmaps(this,
Brian Salomonb5f880a2020-12-07 11:30:16 -0500629 srcData,
630 numProvidedLevels,
631 beTex,
632 textureOrigin,
633 std::move(finishedCallback))) {
634 this->deleteBackendTexture(beTex);
635 return {};
636 }
637 return beTex;
Adlai Holler98dd0042020-10-13 10:04:00 -0400638}
639
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400640bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
641 const SkColor4f& color,
642 GrGpuFinishedProc finishedProc,
643 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500644 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400645
646 if (this->abandoned()) {
647 return false;
648 }
649
650 GrGpu::BackendTextureData data(color);
651 return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
652}
653
654bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
655 SkColorType skColorType,
656 const SkColor4f& color,
657 GrGpuFinishedProc finishedProc,
658 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500659 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400660
661 if (this->abandoned()) {
662 return false;
663 }
664
665 GrBackendFormat format = backendTexture.getBackendFormat();
666 GrColorType grColorType = SkColorTypeAndFormatToGrColorType(this->caps(), skColorType, format);
667
668 if (!this->caps()->areColorTypeAndFormatCompatible(grColorType, format)) {
669 return false;
670 }
671
672 GrSwizzle swizzle = this->caps()->getWriteSwizzle(format, grColorType);
673 GrGpu::BackendTextureData data(swizzle.applyTo(color));
674
675 return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
676}
677
678bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
679 const SkPixmap srcData[],
680 int numLevels,
Brian Salomonb5f880a2020-12-07 11:30:16 -0500681 GrSurfaceOrigin textureOrigin,
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400682 GrGpuFinishedProc finishedProc,
683 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500684 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400685
686 if (this->abandoned()) {
687 return false;
688 }
689
690 if (!srcData || numLevels <= 0) {
691 return false;
692 }
693
Brian Salomonea1d39b2021-04-01 17:06:52 -0400694 // If the texture has MIP levels then we require that the full set is overwritten.
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400695 int numExpectedLevels = 1;
696 if (backendTexture.hasMipmaps()) {
697 numExpectedLevels = SkMipmap::ComputeLevelCount(backendTexture.width(),
698 backendTexture.height()) + 1;
699 }
700 if (numLevels != numExpectedLevels) {
701 return false;
702 }
Brian Salomonea1d39b2021-04-01 17:06:52 -0400703 return update_texture_with_pixmaps(this,
Brian Salomonb5f880a2020-12-07 11:30:16 -0500704 srcData,
705 numLevels,
706 backendTexture,
707 textureOrigin,
708 std::move(finishedCallback));
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400709}
710
Adlai Holler64e13832020-10-13 08:21:56 -0400711//////////////////////////////////////////////////////////////////////////////
712
713static GrBackendTexture create_and_update_compressed_backend_texture(
714 GrDirectContext* dContext,
715 SkISize dimensions,
716 const GrBackendFormat& backendFormat,
717 GrMipmapped mipMapped,
718 GrProtected isProtected,
719 sk_sp<GrRefCntedCallback> finishedCallback,
720 const GrGpu::BackendTextureData* data) {
721 GrGpu* gpu = dContext->priv().getGpu();
722
723 GrBackendTexture beTex = gpu->createCompressedBackendTexture(dimensions, backendFormat,
724 mipMapped, isProtected);
725 if (!beTex.isValid()) {
726 return {};
727 }
728
729 if (!dContext->priv().getGpu()->updateCompressedBackendTexture(
730 beTex, std::move(finishedCallback), data)) {
731 dContext->deleteBackendTexture(beTex);
732 return {};
733 }
734 return beTex;
735}
736
737GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
738 const GrBackendFormat& backendFormat,
739 const SkColor4f& color,
740 GrMipmapped mipMapped,
741 GrProtected isProtected,
742 GrGpuFinishedProc finishedProc,
743 GrGpuFinishedContext finishedContext) {
744 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
Brian Salomon694ff172020-11-04 16:54:28 -0500745 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400746
747 if (this->abandoned()) {
748 return {};
749 }
750
751 GrGpu::BackendTextureData data(color);
752 return create_and_update_compressed_backend_texture(this, {width, height},
753 backendFormat, mipMapped, isProtected,
754 std::move(finishedCallback), &data);
755}
756
757GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
758 SkImage::CompressionType compression,
759 const SkColor4f& color,
760 GrMipmapped mipMapped,
761 GrProtected isProtected,
762 GrGpuFinishedProc finishedProc,
763 GrGpuFinishedContext finishedContext) {
764 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
765 GrBackendFormat format = this->compressedBackendFormat(compression);
766 return this->createCompressedBackendTexture(width, height, format, color,
767 mipMapped, isProtected, finishedProc,
768 finishedContext);
769}
770
771GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
772 const GrBackendFormat& backendFormat,
773 const void* compressedData,
774 size_t dataSize,
775 GrMipmapped mipMapped,
776 GrProtected isProtected,
777 GrGpuFinishedProc finishedProc,
778 GrGpuFinishedContext finishedContext) {
779 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
Brian Salomon694ff172020-11-04 16:54:28 -0500780 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400781
782 if (this->abandoned()) {
783 return {};
784 }
785
786 GrGpu::BackendTextureData data(compressedData, dataSize);
787 return create_and_update_compressed_backend_texture(this, {width, height},
788 backendFormat, mipMapped, isProtected,
789 std::move(finishedCallback), &data);
790}
791
792GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
793 SkImage::CompressionType compression,
794 const void* data, size_t dataSize,
795 GrMipmapped mipMapped,
796 GrProtected isProtected,
797 GrGpuFinishedProc finishedProc,
798 GrGpuFinishedContext finishedContext) {
799 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
800 GrBackendFormat format = this->compressedBackendFormat(compression);
801 return this->createCompressedBackendTexture(width, height, format, data, dataSize, mipMapped,
802 isProtected, finishedProc, finishedContext);
803}
804
805bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
806 const SkColor4f& color,
807 GrGpuFinishedProc finishedProc,
808 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500809 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400810
811 if (this->abandoned()) {
812 return false;
813 }
814
815 GrGpu::BackendTextureData data(color);
816 return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
817}
818
819bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
820 const void* compressedData,
821 size_t dataSize,
822 GrGpuFinishedProc finishedProc,
823 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500824 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400825
826 if (this->abandoned()) {
827 return false;
828 }
829
830 if (!compressedData) {
831 return false;
832 }
833
834 GrGpu::BackendTextureData data(compressedData, dataSize);
835
836 return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
837}
838
Adlai Holler6d0745b2020-10-13 13:29:00 -0400839//////////////////////////////////////////////////////////////////////////////
840
841bool GrDirectContext::setBackendTextureState(const GrBackendTexture& backendTexture,
842 const GrBackendSurfaceMutableState& state,
843 GrBackendSurfaceMutableState* previousState,
844 GrGpuFinishedProc finishedProc,
845 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500846 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400847
848 if (this->abandoned()) {
849 return false;
850 }
851
852 return fGpu->setBackendTextureState(backendTexture, state, previousState, std::move(callback));
853}
854
855
856bool GrDirectContext::setBackendRenderTargetState(const GrBackendRenderTarget& backendRenderTarget,
857 const GrBackendSurfaceMutableState& state,
858 GrBackendSurfaceMutableState* previousState,
859 GrGpuFinishedProc finishedProc,
860 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500861 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400862
863 if (this->abandoned()) {
864 return false;
865 }
866
867 return fGpu->setBackendRenderTargetState(backendRenderTarget, state, previousState,
868 std::move(callback));
869}
870
871void GrDirectContext::deleteBackendTexture(GrBackendTexture backendTex) {
872 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
873 // For the Vulkan backend we still must destroy the backend texture when the context is
874 // abandoned.
875 if ((this->abandoned() && this->backend() != GrBackendApi::kVulkan) || !backendTex.isValid()) {
876 return;
877 }
878
879 fGpu->deleteBackendTexture(backendTex);
880}
881
882//////////////////////////////////////////////////////////////////////////////
883
// Hands a (key, data) pair from a previous shader-cache dump back to the backend so it
// can pre-warm its compiled-shader cache. Returns the GPU backend's accept/reject result.
bool GrDirectContext::precompileShader(const SkData& key, const SkData& data) {
    return fGpu->precompileShader(key, data);
}
887
888#ifdef SK_ENABLE_DUMP_GPU
889#include "include/core/SkString.h"
890#include "src/utils/SkJSONWriter.h"
891SkString GrDirectContext::dump() const {
892 SkDynamicMemoryWStream stream;
893 SkJSONWriter writer(&stream, SkJSONWriter::Mode::kPretty);
894 writer.beginObject();
895
896 writer.appendString("backend", GrBackendApiToStr(this->backend()));
897
898 writer.appendName("caps");
899 this->caps()->dumpJSON(&writer);
900
901 writer.appendName("gpu");
902 this->fGpu->dumpJSON(&writer);
903
904 writer.appendName("context");
905 this->dumpJSON(&writer);
906
907 // Flush JSON to the memory stream
908 writer.endObject();
909 writer.flush();
910
911 // Null terminate the JSON data in the memory stream
912 stream.write8(0);
913
914 // Allocate a string big enough to hold all the data, then copy out of the stream
915 SkString result(stream.bytesWritten());
916 stream.copyToAndReset(result.writable_str());
917 return result;
918}
919#endif
920
John Rosascoa9b348f2019-11-08 13:18:15 -0800921#ifdef SK_GL
Robert Phillipsc7228c62020-07-14 12:57:39 -0400922
Robert Phillipsf4f80112020-07-13 16:13:31 -0400923/*************************************************************************************************/
924sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500925 GrContextOptions defaultOptions;
Jim Van Verth03b8ab22020-02-24 11:36:15 -0500926 return MakeGL(std::move(glInterface), defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -0500927}
928
Robert Phillipsf4f80112020-07-13 16:13:31 -0400929sk_sp<GrDirectContext> GrDirectContext::MakeGL(const GrContextOptions& options) {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400930 return MakeGL(nullptr, options);
931}
932
Robert Phillipsf4f80112020-07-13 16:13:31 -0400933sk_sp<GrDirectContext> GrDirectContext::MakeGL() {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400934 GrContextOptions defaultOptions;
935 return MakeGL(nullptr, defaultOptions);
936}
937
Brian Salomon24069eb2020-06-24 10:19:52 -0400938#if GR_TEST_UTILS
939GrGLFunction<GrGLGetErrorFn> make_get_error_with_random_oom(GrGLFunction<GrGLGetErrorFn> original) {
940 // A SkRandom and a GrGLFunction<GrGLGetErrorFn> are too big to be captured by a
941 // GrGLFunction<GrGLGetError> (surprise, surprise). So we make a context object and
942 // capture that by pointer. However, GrGLFunction doesn't support calling a destructor
943 // on the thing it captures. So we leak the context.
944 struct GetErrorContext {
945 SkRandom fRandom;
946 GrGLFunction<GrGLGetErrorFn> fGetError;
947 };
948
949 auto errorContext = new GetErrorContext;
950
951#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
952 __lsan_ignore_object(errorContext);
953#endif
954
955 errorContext->fGetError = original;
956
957 return GrGLFunction<GrGLGetErrorFn>([errorContext]() {
958 GrGLenum error = errorContext->fGetError();
959 if (error == GR_GL_NO_ERROR && (errorContext->fRandom.nextU() % 300) == 0) {
960 error = GR_GL_OUT_OF_MEMORY;
961 }
962 return error;
963 });
964}
965#endif
966
Robert Phillipsf4f80112020-07-13 16:13:31 -0400967sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface,
968 const GrContextOptions& options) {
969 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kOpenGL, options));
Brian Salomon24069eb2020-06-24 10:19:52 -0400970#if GR_TEST_UTILS
971 if (options.fRandomGLOOM) {
972 auto copy = sk_make_sp<GrGLInterface>(*glInterface);
973 copy->fFunctions.fGetError =
974 make_get_error_with_random_oom(glInterface->fFunctions.fGetError);
975#if GR_GL_CHECK_ERROR
976 // Suppress logging GL errors since we'll be synthetically generating them.
977 copy->suppressErrorLogging();
978#endif
979 glInterface = std::move(copy);
980 }
981#endif
Robert Phillipsf4f80112020-07-13 16:13:31 -0400982 direct->fGpu = GrGLGpu::Make(std::move(glInterface), options, direct.get());
983 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500984 return nullptr;
985 }
Robert Phillipsf4f80112020-07-13 16:13:31 -0400986 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -0500987}
John Rosascoa9b348f2019-11-08 13:18:15 -0800988#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -0500989
Robert Phillipsf4f80112020-07-13 16:13:31 -0400990/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -0400991sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions) {
992 GrContextOptions defaultOptions;
993 return MakeMock(mockOptions, defaultOptions);
994}
995
996sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions,
997 const GrContextOptions& options) {
998 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMock, options));
999
1000 direct->fGpu = GrMockGpu::Make(mockOptions, options, direct.get());
1001 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001002 return nullptr;
1003 }
Chris Daltona378b452019-12-11 13:24:11 -05001004
Robert Phillipsf4f80112020-07-13 16:13:31 -04001005 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001006}
1007
Greg Danielb4d89562018-10-03 18:44:49 +00001008#ifdef SK_VULKAN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001009/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001010sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext) {
1011 GrContextOptions defaultOptions;
1012 return MakeVulkan(backendContext, defaultOptions);
1013}
1014
1015sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext,
1016 const GrContextOptions& options) {
1017 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kVulkan, options));
1018
1019 direct->fGpu = GrVkGpu::Make(backendContext, options, direct.get());
1020 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001021 return nullptr;
1022 }
1023
Robert Phillipsf4f80112020-07-13 16:13:31 -04001024 return direct;
Greg Danielb4d89562018-10-03 18:44:49 +00001025}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001026#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001027
1028#ifdef SK_METAL
Robert Phillipsf4f80112020-07-13 16:13:31 -04001029/*************************************************************************************************/
Jim Van Verth351c9b52020-11-12 15:21:11 -05001030sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001031 GrContextOptions defaultOptions;
Jim Van Verth351c9b52020-11-12 15:21:11 -05001032 return MakeMetal(backendContext, defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -05001033}
1034
Jim Van Verth351c9b52020-11-12 15:21:11 -05001035sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext,
1036 const GrContextOptions& options) {
Robert Phillipsf4f80112020-07-13 16:13:31 -04001037 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
Robert Phillipsa3457b82018-03-08 11:30:12 -05001038
Jim Van Verth351c9b52020-11-12 15:21:11 -05001039 direct->fGpu = GrMtlTrampoline::MakeGpu(backendContext, options, direct.get());
Robert Phillipsf4f80112020-07-13 16:13:31 -04001040 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001041 return nullptr;
1042 }
Timothy Liang4e85e802018-06-28 16:37:18 -04001043
Robert Phillipsf4f80112020-07-13 16:13:31 -04001044 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001045}
Jim Van Verth351c9b52020-11-12 15:21:11 -05001046
1047// deprecated
1048sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue) {
1049 GrContextOptions defaultOptions;
1050 return MakeMetal(device, queue, defaultOptions);
1051}
1052
1053// deprecated
1054// remove include/gpu/mtl/GrMtlBackendContext.h, above, when removed
1055sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue,
1056 const GrContextOptions& options) {
1057 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
1058 GrMtlBackendContext backendContext = {};
1059 backendContext.fDevice.reset(device);
1060 backendContext.fQueue.reset(queue);
1061
1062 return GrDirectContext::MakeMetal(backendContext, options);
1063}
Robert Phillipsa3457b82018-03-08 11:30:12 -05001064#endif
1065
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001066#ifdef SK_DIRECT3D
Robert Phillipsf4f80112020-07-13 16:13:31 -04001067/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001068sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext) {
1069 GrContextOptions defaultOptions;
1070 return MakeDirect3D(backendContext, defaultOptions);
1071}
1072
1073sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext,
1074 const GrContextOptions& options) {
1075 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDirect3D, options));
1076
1077 direct->fGpu = GrD3DGpu::Make(backendContext, options, direct.get());
1078 if (!direct->init()) {
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -05001079 return nullptr;
1080 }
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001081
Robert Phillipsf4f80112020-07-13 16:13:31 -04001082 return direct;
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001083}
1084#endif
1085
Stephen White985741a2019-07-18 11:43:45 -04001086#ifdef SK_DAWN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001087/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001088sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device) {
Stephen White985741a2019-07-18 11:43:45 -04001089 GrContextOptions defaultOptions;
1090 return MakeDawn(device, defaultOptions);
1091}
1092
Robert Phillipsf4f80112020-07-13 16:13:31 -04001093sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device,
1094 const GrContextOptions& options) {
1095 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDawn, options));
Stephen White985741a2019-07-18 11:43:45 -04001096
Robert Phillipsf4f80112020-07-13 16:13:31 -04001097 direct->fGpu = GrDawnGpu::Make(device, options, direct.get());
1098 if (!direct->init()) {
Stephen White985741a2019-07-18 11:43:45 -04001099 return nullptr;
1100 }
1101
Robert Phillipsf4f80112020-07-13 16:13:31 -04001102 return direct;
Stephen White985741a2019-07-18 11:43:45 -04001103}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001104
Stephen White985741a2019-07-18 11:43:45 -04001105#endif