blob: 6e393e7daa6227100a31b7eef105e2f40ab5d5b5 [file] [log] [blame]
Robert Phillipsa3457b82018-03-08 11:30:12 -05001/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Greg Daniel54bfb182018-11-20 17:12:36 -05008
Robert Phillipsb7bfbc22020-07-01 12:55:01 -04009#include "include/gpu/GrDirectContext.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050010
Adlai Holler3acc69a2020-10-13 08:20:51 -040011#include "include/core/SkTraceMemoryDump.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050012#include "include/gpu/GrContextThreadSafeProxy.h"
Adlai Holler9555f292020-10-09 09:41:14 -040013#include "src/core/SkTaskGroup.h"
14#include "src/gpu/GrClientMappedBufferManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050015#include "src/gpu/GrContextThreadSafeProxyPriv.h"
Adlai Hollera0693042020-10-14 11:23:11 -040016#include "src/gpu/GrDirectContextPriv.h"
Adlai Holler4aa4c602020-10-12 13:58:52 -040017#include "src/gpu/GrDrawingManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/gpu/GrGpu.h"
Adlai Holler9555f292020-10-09 09:41:14 -040019#include "src/gpu/GrResourceProvider.h"
20#include "src/gpu/GrShaderUtils.h"
Adlai Holler3acc69a2020-10-13 08:20:51 -040021#include "src/image/SkImage_GpuBase.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050022
Adlai Holler4aa4c602020-10-12 13:58:52 -040023#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050024#include "src/gpu/effects/GrSkSLFP.h"
25#include "src/gpu/gl/GrGLGpu.h"
26#include "src/gpu/mock/GrMockGpu.h"
Robert Phillips5edf5102020-08-10 16:30:36 -040027#include "src/gpu/ops/GrSmallPathAtlasMgr.h"
Robert Phillipse19babf2020-04-06 13:57:30 -040028#include "src/gpu/text/GrAtlasManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050029#include "src/gpu/text/GrStrikeCache.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050030#ifdef SK_METAL
Jim Van Verth351c9b52020-11-12 15:21:11 -050031#include "include/gpu/mtl/GrMtlBackendContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050032#include "src/gpu/mtl/GrMtlTrampoline.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050033#endif
34#ifdef SK_VULKAN
Mike Kleinc0bd9f92019-04-23 12:05:21 -050035#include "src/gpu/vk/GrVkGpu.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050036#endif
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -050037#ifdef SK_DIRECT3D
38#include "src/gpu/d3d/GrD3DGpu.h"
39#endif
Stephen White985741a2019-07-18 11:43:45 -040040#ifdef SK_DAWN
Mike Klein52337de2019-07-25 09:00:52 -050041#include "src/gpu/dawn/GrDawnGpu.h"
Stephen White985741a2019-07-18 11:43:45 -040042#endif
Adlai Holler6d0745b2020-10-13 13:29:00 -040043#include <memory>
Robert Phillipsa3457b82018-03-08 11:30:12 -050044
Brian Salomon24069eb2020-06-24 10:19:52 -040045#if GR_TEST_UTILS
46# include "include/utils/SkRandom.h"
47# if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
48# include <sanitizer/lsan_interface.h>
49# endif
50#endif
51
Adlai Holler9555f292020-10-09 09:41:14 -040052#define ASSERT_SINGLE_OWNER GR_ASSERT_SINGLE_OWNER(this->singleOwner())
53
// Constructs a direct context for the given backend API. Heavy initialization is
// deferred to init(); here we only create the thread-safe proxy shared with any
// GrContextThreadSafeProxy clients.
GrDirectContext::GrDirectContext(GrBackendApi backend, const GrContextOptions& options)
        : INHERITED(GrContextThreadSafeProxyPriv::Make(backend, options)) {
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050057
// Tears the context down. Order matters: flush pending work, wait for the GPU to
// finish, destroy the drawing manager, then release cached resources.
GrDirectContext::~GrDirectContext() {
    ASSERT_SINGLE_OWNER
    // this if-test protects against the case where the context is being destroyed
    // before having been fully created
    if (fGpu) {
        this->flushAndSubmit();
    }

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/false);

    this->destroyDrawingManager();
    fMappedBufferManager.reset();

    // Ideally we could just let the ptr drop, but resource cache queries this ptr in releaseAll.
    if (fResourceCache) {
        fResourceCache->releaseAll();
    }
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050077
// Public accessor for the thread-safe proxy; simply re-exposes the base-class getter.
sk_sp<GrContextThreadSafeProxy> GrDirectContext::threadSafeProxy() {
    return INHERITED::threadSafeProxy();
}
81
Adlai Hollera7a40442020-10-09 09:49:42 -040082void GrDirectContext::resetGLTextureBindings() {
83 if (this->abandoned() || this->backend() != GrBackendApi::kOpenGL) {
84 return;
85 }
86 fGpu->resetTextureBindings();
87}
88
// Marks portions of the 3D API context state dirty so they are re-sent to the
// driver on next use. 'state' is a bitmask of backend state categories.
void GrDirectContext::resetContext(uint32_t state) {
    ASSERT_SINGLE_OWNER
    fGpu->markContextDirty(state);
}
93
// Abandons the context: no further GPU work will be issued, and resources are
// dropped without being returned to the 3D API. The release order below is
// deliberate — caches are abandoned before the GrGpu is disconnected.
void GrDirectContext::abandonContext() {
    // Idempotent: a second call is a no-op.
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(this->caps()->mustSyncGpuDuringAbandon());

    fStrikeCache->freeAll();

    fMappedBufferManager->abandon();

    fResourceProvider->abandon();

    // abandon first so destructors don't try to free the resources in the API.
    fResourceCache->abandonAll();

    fGpu->disconnect(GrGpu::DisconnectType::kAbandon);

    fMappedBufferManager.reset();
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500121
Adlai Hollera7a40442020-10-09 09:49:42 -0400122bool GrDirectContext::abandoned() {
123 if (INHERITED::abandoned()) {
124 return true;
125 }
126
127 if (fGpu && fGpu->isDeviceLost()) {
128 this->abandonContext();
129 return true;
130 }
131 return false;
132}
133
Adlai Holler61a591c2020-10-12 12:38:33 -0400134bool GrDirectContext::oomed() { return fGpu ? fGpu->checkAndResetOOMed() : false; }
135
// Like abandonContext(), but resources ARE returned to the 3D API (releaseAll /
// DisconnectType::kCleanup) before the context is abandoned. Order is deliberate.
void GrDirectContext::releaseResourcesAndAbandonContext() {
    // Idempotent: a second call is a no-op.
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/true);

    fMappedBufferManager.reset();

    fResourceProvider->abandon();

    // Release all resources in the backend 3D API.
    fResourceCache->releaseAll();

    fGpu->disconnect(GrGpu::DisconnectType::kCleanup);
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillips6db27c22019-05-01 10:43:56 -0400159
// Frees all unlocked GPU resources without abandoning the context: flushes
// pending work first, then empties the atlases, caches, and resource cache.
void GrDirectContext::freeGpuResources() {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->flushAndSubmit();
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();

    // TODO: the glyph cache doesn't hold any GpuResources so this call should not be needed here.
    // Some slack in the GrTextBlob's implementation requires it though. That could be fixed.
    fStrikeCache->freeAll();

    this->drawingManager()->freeGpuResources();

    fResourceCache->purgeAllUnlocked();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500181
// Second-phase initialization. Returns false if no GrGpu was created by the
// factory or base-class init fails. Builds the caches, providers and atlas
// manager in dependency order; callers must not use the context on failure.
bool GrDirectContext::init() {
    ASSERT_SINGLE_OWNER
    if (!fGpu) {
        return false;
    }

    // The proxy needs the caps before INHERITED::init() can run.
    fThreadSafeProxy->priv().init(fGpu->refCaps());
    if (!INHERITED::init()) {
        return false;
    }

    SkASSERT(this->getTextBlobCache());
    SkASSERT(this->threadSafeCache());

    fStrikeCache = std::make_unique<GrStrikeCache>();
    fResourceCache = std::make_unique<GrResourceCache>(this->singleOwner(), this->contextID());
    fResourceCache->setProxyProvider(this->proxyProvider());
    fResourceCache->setThreadSafeCache(this->threadSafeCache());
    fResourceProvider = std::make_unique<GrResourceProvider>(fGpu.get(), fResourceCache.get(),
                                                             this->singleOwner());
    fMappedBufferManager = std::make_unique<GrClientMappedBufferManager>(this->contextID());

    fDidTestPMConversions = false;

    // DDL TODO: we need to think through how the task group & persistent cache
    // get passed on to/shared between all the DDLRecorders created with this context.
    if (this->options().fExecutor) {
        fTaskGroup = std::make_unique<SkTaskGroup>(*this->options().fExecutor);
    }

    fPersistentCache = this->options().fPersistentCache;
    fShaderErrorHandler = this->options().fShaderErrorHandler;
    if (!fShaderErrorHandler) {
        fShaderErrorHandler = GrShaderUtils::DefaultShaderErrorHandler();
    }

    // Decide whether the glyph atlas may span multiple textures.
    GrDrawOpAtlas::AllowMultitexturing allowMultitexturing;
    if (GrContextOptions::Enable::kNo == this->options().fAllowMultipleGlyphCacheTextures ||
        // multitexturing supported only if range can represent the index + texcoords fully
        !(this->caps()->shaderCaps()->floatIs32Bits() ||
          this->caps()->shaderCaps()->integerSupport())) {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kNo;
    } else {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kYes;
    }

    GrProxyProvider* proxyProvider = this->priv().proxyProvider();

    fAtlasManager = std::make_unique<GrAtlasManager>(proxyProvider,
                                                     this->options().fGlyphCacheTextureMaximumBytes,
                                                     allowMultitexturing);
    // The atlas manager must be flushed alongside every draw flush.
    this->priv().addOnFlushCallbackObject(fAtlasManager.get());

    return true;
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500237
Adlai Holler3a508e92020-10-12 13:58:01 -0400238void GrDirectContext::getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const {
239 ASSERT_SINGLE_OWNER
240
241 if (resourceCount) {
242 *resourceCount = fResourceCache->getBudgetedResourceCount();
243 }
244 if (resourceBytes) {
245 *resourceBytes = fResourceCache->getBudgetedResourceBytes();
246 }
247}
248
// Returns the number of bytes held by resources that could be purged right now.
size_t GrDirectContext::getResourceCachePurgeableBytes() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getPurgeableBytes();
}
253
254void GrDirectContext::getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const {
255 ASSERT_SINGLE_OWNER
256 if (maxResources) {
257 *maxResources = -1;
258 }
259 if (maxResourceBytes) {
260 *maxResourceBytes = this->getResourceCacheLimit();
261 }
262}
263
// Returns the resource cache's byte budget.
size_t GrDirectContext::getResourceCacheLimit() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getMaxResourceBytes();
}
268
// Legacy setter: the resource-count limit is ignored ('unused'); only the byte
// budget is applied.
void GrDirectContext::setResourceCacheLimits(int unused, size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    this->setResourceCacheLimit(maxResourceBytes);
}
273
// Sets the resource cache's byte budget; eviction policy is handled by the cache.
void GrDirectContext::setResourceCacheLimit(size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    fResourceCache->setLimit(maxResourceBytes);
}
278
// Purges unlocked resources from the cache; if 'scratchResourcesOnly' only
// unreferenced scratch resources are dropped. Also takes the opportunity to
// clean out stale text blobs.
void GrDirectContext::purgeUnlockedResources(bool scratchResourcesOnly) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(scratchResourcesOnly);
    fResourceCache->purgeAsNeeded();

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
293
// Frees resources that have not been used within 'msNotUsed'. Also processes
// finished async work and mapped-buffer returns before computing the cutoff.
void GrDirectContext::performDeferredCleanup(std::chrono::milliseconds msNotUsed) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->checkAsyncWorkCompletion();
    fMappedBufferManager->process();
    // Anything last used before this instant is eligible for purging.
    auto purgeTime = GrStdSteadyClock::now() - msNotUsed;

    fResourceCache->purgeAsNeeded();
    fResourceCache->purgeResourcesNotUsedSince(purgeTime);

    // CCPR keeps its own path-atlas cache; age it out with the same cutoff.
    if (auto ccpr = this->drawingManager()->getCoverageCountingPathRenderer()) {
        ccpr->purgeCacheEntriesOlderThan(this->proxyProvider(), purgeTime);
    }

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
318
319void GrDirectContext::purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources) {
320 ASSERT_SINGLE_OWNER
321
322 if (this->abandoned()) {
323 return;
324 }
325
326 fResourceCache->purgeUnlockedResources(bytesToPurge, preferScratchResources);
327}
328
Adlai Holler3acc69a2020-10-13 08:20:51 -0400329////////////////////////////////////////////////////////////////////////////////
330bool GrDirectContext::wait(int numSemaphores, const GrBackendSemaphore waitSemaphores[],
331 bool deleteSemaphoresAfterWait) {
332 if (!fGpu || fGpu->caps()->semaphoreSupport()) {
333 return false;
334 }
335 GrWrapOwnership ownership =
336 deleteSemaphoresAfterWait ? kAdopt_GrWrapOwnership : kBorrow_GrWrapOwnership;
337 for (int i = 0; i < numSemaphores; ++i) {
338 std::unique_ptr<GrSemaphore> sema = fResourceProvider->wrapBackendSemaphore(
339 waitSemaphores[i], GrResourceProvider::SemaphoreWrapType::kWillWait, ownership);
340 // If we failed to wrap the semaphore it means the client didn't give us a valid semaphore
341 // to begin with. Therefore, it is fine to not wait on it.
342 if (sema) {
343 fGpu->waitSemaphore(sema.get());
344 }
345 }
346 return true;
347}
Adlai Holler4aa4c602020-10-12 13:58:52 -0400348
Robert Phillips5edf5102020-08-10 16:30:36 -0400349GrSmallPathAtlasMgr* GrDirectContext::onGetSmallPathAtlasMgr() {
Robert Phillips079455c2020-08-11 15:18:46 -0400350 if (!fSmallPathAtlasMgr) {
351 fSmallPathAtlasMgr = std::make_unique<GrSmallPathAtlasMgr>();
352
353 this->priv().addOnFlushCallbackObject(fSmallPathAtlasMgr.get());
354 }
355
356 if (!fSmallPathAtlasMgr->initAtlas(this->proxyProvider(), this->caps())) {
357 return nullptr;
358 }
359
360 return fSmallPathAtlasMgr.get();
Robert Phillips5edf5102020-08-10 16:30:36 -0400361}
362
Adlai Holler3acc69a2020-10-13 08:20:51 -0400363////////////////////////////////////////////////////////////////////////////////
364
// Flushes all work to the GPU via the drawing manager. On an abandoned context
// the client's finished/submitted procs are still invoked (submitted=false) so
// callers never leak their contexts waiting for a callback that won't come.
GrSemaphoresSubmitted GrDirectContext::flush(const GrFlushInfo& info) {
    ASSERT_SINGLE_OWNER
    if (this->abandoned()) {
        if (info.fFinishedProc) {
            info.fFinishedProc(info.fFinishedContext);
        }
        if (info.fSubmittedProc) {
            info.fSubmittedProc(info.fSubmittedContext, false);
        }
        return GrSemaphoresSubmitted::kNo;
    }

    return this->drawingManager()->flushSurfaces({}, SkSurface::BackendSurfaceAccess::kNoAccess,
                                                 info, nullptr);
}
380
381bool GrDirectContext::submit(bool syncCpu) {
382 ASSERT_SINGLE_OWNER
383 if (this->abandoned()) {
384 return false;
385 }
386
387 if (!fGpu) {
388 return false;
389 }
390
391 return fGpu->submitToGpu(syncCpu);
392}
393
394////////////////////////////////////////////////////////////////////////////////
395
// Polls the GPU for completed work and fires any pending finished-procs.
void GrDirectContext::checkAsyncWorkCompletion() {
    if (fGpu) {
        fGpu->checkFinishProcs();
    }
}
401
Greg Daniela89b4302021-01-29 10:48:40 -0500402void GrDirectContext::syncAllOutstandingGpuWork(bool shouldExecuteWhileAbandoned) {
403 if (fGpu && (!this->abandoned() || shouldExecuteWhileAbandoned)) {
404 fGpu->finishOutstandingGpuWork();
405 this->checkAsyncWorkCompletion();
406 }
407}
408
Adlai Holler3acc69a2020-10-13 08:20:51 -0400409////////////////////////////////////////////////////////////////////////////////
410
// Persists the Vulkan pipeline cache through the backend; a no-op for other
// backends or when no GPU object exists.
void GrDirectContext::storeVkPipelineCacheData() {
    if (fGpu) {
        fGpu->storeVkPipelineCacheData();
    }
}
416
417////////////////////////////////////////////////////////////////////////////////
418
// True when the shader caps allow distance-field text rendering.
bool GrDirectContext::supportsDistanceFieldText() const {
    return this->caps()->shaderCaps()->supportsDistanceFieldText();
}
422
423//////////////////////////////////////////////////////////////////////////////
424
// Dumps GPU memory statistics (resource cache plus text-blob cache size) into
// the client-provided SkTraceMemoryDump.
void GrDirectContext::dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const {
    ASSERT_SINGLE_OWNER
    fResourceCache->dumpMemoryStatistics(traceMemoryDump);
    traceMemoryDump->dumpNumericValue("skia/gr_text_blob_cache", "size", "bytes",
                                      this->getTextBlobCache()->usedBytes());
}
431
432size_t GrDirectContext::ComputeImageSize(sk_sp<SkImage> image, GrMipmapped mipMapped,
433 bool useNextPow2) {
434 if (!image->isTextureBacked()) {
435 return 0;
436 }
437 SkImage_GpuBase* gpuImage = static_cast<SkImage_GpuBase*>(as_IB(image.get()));
438 GrTextureProxy* proxy = gpuImage->peekProxy();
439 if (!proxy) {
440 return 0;
441 }
442
443 int colorSamplesPerPixel = 1;
444 return GrSurface::ComputeSize(proxy->backendFormat(), image->dimensions(),
445 colorSamplesPerPixel, mipMapped, useNextPow2);
446}
447
// Creates an uninitialized backend texture with the given format/flags.
// Returns an invalid GrBackendTexture on an abandoned context or failure.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    return fGpu->createBackendTexture({width, height}, backendFormat, renderable,
                                      mipMapped, isProtected);
}
461
462GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
463 SkColorType skColorType,
464 GrMipmapped mipMapped,
465 GrRenderable renderable,
466 GrProtected isProtected) {
467 if (this->abandoned()) {
468 return GrBackendTexture();
469 }
470
471 const GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
472
473 return this->createBackendTexture(width, height, format, mipMapped, renderable, isProtected);
474}
475
// Helper: creates a backend texture and immediately uploads 'data' into it.
// On upload failure the freshly-created texture is deleted so nothing leaks;
// returns an invalid texture in every failure case.
static GrBackendTexture create_and_update_backend_texture(
        GrDirectContext* dContext,
        SkISize dimensions,
        const GrBackendFormat& backendFormat,
        GrMipmapped mipMapped,
        GrRenderable renderable,
        GrProtected isProtected,
        sk_sp<GrRefCntedCallback> finishedCallback,
        const GrGpu::BackendTextureData* data) {
    GrGpu* gpu = dContext->priv().getGpu();

    GrBackendTexture beTex = gpu->createBackendTexture(dimensions, backendFormat, renderable,
                                                       mipMapped, isProtected);
    if (!beTex.isValid()) {
        return {};
    }

    if (!dContext->priv().getGpu()->updateBackendTexture(beTex,
                                                         std::move(finishedCallback),
                                                         data)) {
        // Don't leak the texture if the data upload fails.
        dContext->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
501
// Helper: uploads 'numLevels' pixmap mip levels into 'backendTexture'. Levels
// that need conversion — a bottom-left origin (vertical flip) or non-tight rows
// on backends without rowBytes support — are first repacked into one shared
// temp buffer; all others are passed through untouched.
static bool update_texture_with_pixmaps(GrGpu* gpu,
                                        const SkPixmap* srcData,
                                        int numLevels,
                                        const GrBackendTexture& backendTexture,
                                        GrSurfaceOrigin textureOrigin,
                                        sk_sp<GrRefCntedCallback> finishedCallback) {
    bool flip = textureOrigin == kBottomLeft_GrSurfaceOrigin;
    bool mustBeTight = !gpu->caps()->writePixelsRowBytesSupport();

    // Pass 1: total the bytes needed for every level that must be repacked.
    size_t size = 0;
    for (int i = 0; i < numLevels; ++i) {
        size_t minRowBytes = srcData[i].info().minRowBytes();
        if (flip || (mustBeTight && srcData[i].rowBytes() != minRowBytes)) {
            size += minRowBytes * srcData[i].height();
        }
    }

    std::unique_ptr<char[]> tempStorage;
    if (size) {
        tempStorage.reset(new char[size]);
    }
    // Pass 2: repack converted levels into tempStorage ('size' becomes the
    // running offset); levels already in acceptable form alias the source.
    size = 0;
    SkAutoSTArray<15, GrPixmap> tempPixmaps(numLevels);
    for (int i = 0; i < numLevels; ++i) {
        size_t minRowBytes = srcData[i].info().minRowBytes();
        if (flip || (mustBeTight && srcData[i].rowBytes() != minRowBytes)) {
            tempPixmaps[i] = {srcData[i].info(), tempStorage.get() + size, minRowBytes};
            SkAssertResult(GrConvertPixels(tempPixmaps[i], srcData[i], flip));
            size += minRowBytes*srcData[i].height();
        } else {
            tempPixmaps[i] = srcData[i];
        }
    }

    GrGpu::BackendTextureData data(tempPixmaps.get());
    return gpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
539
// Creates a backend texture cleared to 'color'. The finished proc is wrapped
// before the abandoned() check — NOTE(review): presumably so the client's
// callback still fires (via the wrapper's destruction) on early-out; confirm
// GrRefCntedCallback's destructor semantics.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_backend_texture(this, {width, height},
                                             backendFormat, mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
560
// Creates a backend texture cleared to 'color', choosing the default backend
// format for 'skColorType'. The clear color is run through the write swizzle so
// the stored channel order matches what reads of this format expect.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
    if (!format.isValid()) {
        return {};
    }

    GrColorType grColorType = SkColorTypeToGrColorType(skColorType);
    SkColor4f swizzledColor = this->caps()->getWriteSwizzle(format, grColorType).applyTo(color);

    GrGpu::BackendTextureData data(swizzledColor);
    return create_and_update_backend_texture(this, {width, height}, format,
                                             mipMapped, renderable, isProtected,
                                             std::move(finishedCallback), &data);
}
588
// Creates a backend texture initialized from client pixmaps. The number of
// provided levels must be either 1 (no mips) or the full mip chain count for
// the base dimensions; anything else is rejected. On upload failure the
// partially-created texture is deleted.
GrBackendTexture GrDirectContext::createBackendTexture(const SkPixmap srcData[],
                                                       int numProvidedLevels,
                                                       GrSurfaceOrigin textureOrigin,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    // Wrap the proc first so the client callback is managed even on early-out.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    if (!srcData || numProvidedLevels <= 0) {
        return {};
    }

    // Level 0 defines the texture's dimensions and color type.
    int baseWidth = srcData[0].width();
    int baseHeight = srcData[0].height();
    SkColorType colorType = srcData[0].colorType();

    GrMipmapped mipMapped = GrMipmapped::kNo;
    int numExpectedLevels = 1;
    if (numProvidedLevels > 1) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(baseWidth, baseHeight) + 1;
        mipMapped = GrMipmapped::kYes;
    }

    // Either exactly one level or a complete mip chain.
    if (numProvidedLevels != numExpectedLevels) {
        return {};
    }

    GrBackendFormat backendFormat = this->defaultBackendFormat(colorType, renderable);
    GrBackendTexture beTex = this->createBackendTexture(srcData[0].width(),
                                                        srcData[0].height(),
                                                        backendFormat,
                                                        mipMapped,
                                                        renderable,
                                                        isProtected);
    if (!beTex.isValid()) {
        return {};
    }
    if (!update_texture_with_pixmaps(this->priv().getGpu(),
                                     srcData,
                                     numProvidedLevels,
                                     beTex,
                                     textureOrigin,
                                     std::move(finishedCallback))) {
        // Don't leak the texture if the pixel upload fails.
        this->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
644
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400645bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
646 const SkColor4f& color,
647 GrGpuFinishedProc finishedProc,
648 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500649 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400650
651 if (this->abandoned()) {
652 return false;
653 }
654
655 GrGpu::BackendTextureData data(color);
656 return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
657}
658
// Overwrites 'backendTexture' with 'color', interpreting the texture through
// 'skColorType'. Fails if that color type is incompatible with the texture's
// backend format. The color is swizzled into the format's channel order.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           SkColorType skColorType,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrBackendFormat format = backendTexture.getBackendFormat();
    GrColorType grColorType = SkColorTypeAndFormatToGrColorType(this->caps(), skColorType, format);

    if (!this->caps()->areColorTypeAndFormatCompatible(grColorType, format)) {
        return false;
    }

    GrSwizzle swizzle = this->caps()->getWriteSwizzle(format, grColorType);
    GrGpu::BackendTextureData data(swizzle.applyTo(color));

    return fGpu->updateBackendTexture(backendTexture, std::move(finishedCallback), &data);
}
682
// Uploads client pixmap levels into an existing backend texture. 'numLevels'
// must match the texture: 1 for a non-mipped texture, or the full chain count
// for a mipped one.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkPixmap srcData[],
                                           int numLevels,
                                           GrSurfaceOrigin textureOrigin,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    if (!srcData || numLevels <= 0) {
        return false;
    }

    // Require either exactly one level or the texture's complete mip chain.
    int numExpectedLevels = 1;
    if (backendTexture.hasMipmaps()) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(backendTexture.width(),
                                                        backendTexture.height()) + 1;
    }
    if (numLevels != numExpectedLevels) {
        return false;
    }
    return update_texture_with_pixmaps(fGpu.get(),
                                       srcData,
                                       numLevels,
                                       backendTexture,
                                       textureOrigin,
                                       std::move(finishedCallback));
}
714
Adlai Holler64e13832020-10-13 08:21:56 -0400715//////////////////////////////////////////////////////////////////////////////
716
// Helper: creates a compressed backend texture and uploads 'data' into it.
// Deletes the texture (so nothing leaks) and returns an invalid handle if the
// upload fails.
static GrBackendTexture create_and_update_compressed_backend_texture(
        GrDirectContext* dContext,
        SkISize dimensions,
        const GrBackendFormat& backendFormat,
        GrMipmapped mipMapped,
        GrProtected isProtected,
        sk_sp<GrRefCntedCallback> finishedCallback,
        const GrGpu::BackendTextureData* data) {
    GrGpu* gpu = dContext->priv().getGpu();

    GrBackendTexture beTex = gpu->createCompressedBackendTexture(dimensions, backendFormat,
                                                                 mipMapped, isProtected);
    if (!beTex.isValid()) {
        return {};
    }

    if (!dContext->priv().getGpu()->updateCompressedBackendTexture(
                beTex, std::move(finishedCallback), data)) {
        // Don't leak the texture if the data upload fails.
        dContext->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
740
// Creates a compressed backend texture filled with 'color' (the backend expands
// the color into the compressed encoding). Invalid handle on failure/abandon.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const SkColor4f& color,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(color);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}
760
761GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
762 SkImage::CompressionType compression,
763 const SkColor4f& color,
764 GrMipmapped mipMapped,
765 GrProtected isProtected,
766 GrGpuFinishedProc finishedProc,
767 GrGpuFinishedContext finishedContext) {
768 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
769 GrBackendFormat format = this->compressedBackendFormat(compression);
770 return this->createCompressedBackendTexture(width, height, format, color,
771 mipMapped, isProtected, finishedProc,
772 finishedContext);
773}
774
// Creates a compressed backend texture initialized from raw pre-compressed
// data ('dataSize' bytes). Invalid handle on failure or an abandoned context.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
                                                                 const GrBackendFormat& backendFormat,
                                                                 const void* compressedData,
                                                                 size_t dataSize,
                                                                 GrMipmapped mipMapped,
                                                                 GrProtected isProtected,
                                                                 GrGpuFinishedProc finishedProc,
                                                                 GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrGpu::BackendTextureData data(compressedData, dataSize);
    return create_and_update_compressed_backend_texture(this, {width, height},
                                                        backendFormat, mipMapped, isProtected,
                                                        std::move(finishedCallback), &data);
}
795
796GrBackendTexture GrDirectContext::createCompressedBackendTexture(int width, int height,
797 SkImage::CompressionType compression,
798 const void* data, size_t dataSize,
799 GrMipmapped mipMapped,
800 GrProtected isProtected,
801 GrGpuFinishedProc finishedProc,
802 GrGpuFinishedContext finishedContext) {
803 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
804 GrBackendFormat format = this->compressedBackendFormat(compression);
805 return this->createCompressedBackendTexture(width, height, format, data, dataSize, mipMapped,
806 isProtected, finishedProc, finishedContext);
807}
808
809bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
810 const SkColor4f& color,
811 GrGpuFinishedProc finishedProc,
812 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500813 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400814
815 if (this->abandoned()) {
816 return false;
817 }
818
819 GrGpu::BackendTextureData data(color);
820 return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
821}
822
823bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
824 const void* compressedData,
825 size_t dataSize,
826 GrGpuFinishedProc finishedProc,
827 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500828 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400829
830 if (this->abandoned()) {
831 return false;
832 }
833
834 if (!compressedData) {
835 return false;
836 }
837
838 GrGpu::BackendTextureData data(compressedData, dataSize);
839
840 return fGpu->updateCompressedBackendTexture(backendTexture, std::move(finishedCallback), &data);
841}
842
Adlai Holler6d0745b2020-10-13 13:29:00 -0400843//////////////////////////////////////////////////////////////////////////////
844
845bool GrDirectContext::setBackendTextureState(const GrBackendTexture& backendTexture,
846 const GrBackendSurfaceMutableState& state,
847 GrBackendSurfaceMutableState* previousState,
848 GrGpuFinishedProc finishedProc,
849 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500850 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400851
852 if (this->abandoned()) {
853 return false;
854 }
855
856 return fGpu->setBackendTextureState(backendTexture, state, previousState, std::move(callback));
857}
858
859
860bool GrDirectContext::setBackendRenderTargetState(const GrBackendRenderTarget& backendRenderTarget,
861 const GrBackendSurfaceMutableState& state,
862 GrBackendSurfaceMutableState* previousState,
863 GrGpuFinishedProc finishedProc,
864 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500865 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400866
867 if (this->abandoned()) {
868 return false;
869 }
870
871 return fGpu->setBackendRenderTargetState(backendRenderTarget, state, previousState,
872 std::move(callback));
873}
874
875void GrDirectContext::deleteBackendTexture(GrBackendTexture backendTex) {
876 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
877 // For the Vulkan backend we still must destroy the backend texture when the context is
878 // abandoned.
879 if ((this->abandoned() && this->backend() != GrBackendApi::kVulkan) || !backendTex.isValid()) {
880 return;
881 }
882
883 fGpu->deleteBackendTexture(backendTex);
884}
885
886//////////////////////////////////////////////////////////////////////////////
887
// Forwards a (key, data) pair to the backend GrGpu for shader precompilation and
// returns its success/failure result.
// NOTE(review): unlike most entry points in this file there is no this->abandoned()
// guard before dereferencing fGpu -- confirm callers never invoke this on an
// abandoned context.
bool GrDirectContext::precompileShader(const SkData& key, const SkData& data) {
    return fGpu->precompileShader(key, data);
}
891
892#ifdef SK_ENABLE_DUMP_GPU
893#include "include/core/SkString.h"
894#include "src/utils/SkJSONWriter.h"
// Serializes the context's diagnostic state (backend name, caps, GrGpu state,
// and the context's own JSON dump) into a single pretty-printed JSON string.
// Only compiled when SK_ENABLE_DUMP_GPU is defined.
SkString GrDirectContext::dump() const {
    SkDynamicMemoryWStream stream;
    SkJSONWriter writer(&stream, SkJSONWriter::Mode::kPretty);
    writer.beginObject();

    writer.appendString("backend", GrBackendApiToStr(this->backend()));

    // Each sub-object is emitted by the owning component's own dumpJSON().
    writer.appendName("caps");
    this->caps()->dumpJSON(&writer);

    writer.appendName("gpu");
    this->fGpu->dumpJSON(&writer);

    writer.appendName("context");
    this->dumpJSON(&writer);

    // Flush JSON to the memory stream
    writer.endObject();
    writer.flush();

    // Null terminate the JSON data in the memory stream
    stream.write8(0);

    // Allocate a string big enough to hold all the data, then copy out of the stream
    SkString result(stream.bytesWritten());
    stream.copyToAndReset(result.writable_str());
    return result;
}
923#endif
924
John Rosascoa9b348f2019-11-08 13:18:15 -0800925#ifdef SK_GL
Robert Phillipsc7228c62020-07-14 12:57:39 -0400926
Robert Phillipsf4f80112020-07-13 16:13:31 -0400927/*************************************************************************************************/
928sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500929 GrContextOptions defaultOptions;
Jim Van Verth03b8ab22020-02-24 11:36:15 -0500930 return MakeGL(std::move(glInterface), defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -0500931}
932
Robert Phillipsf4f80112020-07-13 16:13:31 -0400933sk_sp<GrDirectContext> GrDirectContext::MakeGL(const GrContextOptions& options) {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400934 return MakeGL(nullptr, options);
935}
936
Robert Phillipsf4f80112020-07-13 16:13:31 -0400937sk_sp<GrDirectContext> GrDirectContext::MakeGL() {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400938 GrContextOptions defaultOptions;
939 return MakeGL(nullptr, defaultOptions);
940}
941
Brian Salomon24069eb2020-06-24 10:19:52 -0400942#if GR_TEST_UTILS
943GrGLFunction<GrGLGetErrorFn> make_get_error_with_random_oom(GrGLFunction<GrGLGetErrorFn> original) {
944 // A SkRandom and a GrGLFunction<GrGLGetErrorFn> are too big to be captured by a
945 // GrGLFunction<GrGLGetError> (surprise, surprise). So we make a context object and
946 // capture that by pointer. However, GrGLFunction doesn't support calling a destructor
947 // on the thing it captures. So we leak the context.
948 struct GetErrorContext {
949 SkRandom fRandom;
950 GrGLFunction<GrGLGetErrorFn> fGetError;
951 };
952
953 auto errorContext = new GetErrorContext;
954
955#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
956 __lsan_ignore_object(errorContext);
957#endif
958
959 errorContext->fGetError = original;
960
961 return GrGLFunction<GrGLGetErrorFn>([errorContext]() {
962 GrGLenum error = errorContext->fGetError();
963 if (error == GR_GL_NO_ERROR && (errorContext->fRandom.nextU() % 300) == 0) {
964 error = GR_GL_OUT_OF_MEMORY;
965 }
966 return error;
967 });
968}
969#endif
970
Robert Phillipsf4f80112020-07-13 16:13:31 -0400971sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface,
972 const GrContextOptions& options) {
973 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kOpenGL, options));
Brian Salomon24069eb2020-06-24 10:19:52 -0400974#if GR_TEST_UTILS
975 if (options.fRandomGLOOM) {
976 auto copy = sk_make_sp<GrGLInterface>(*glInterface);
977 copy->fFunctions.fGetError =
978 make_get_error_with_random_oom(glInterface->fFunctions.fGetError);
979#if GR_GL_CHECK_ERROR
980 // Suppress logging GL errors since we'll be synthetically generating them.
981 copy->suppressErrorLogging();
982#endif
983 glInterface = std::move(copy);
984 }
985#endif
Robert Phillipsf4f80112020-07-13 16:13:31 -0400986 direct->fGpu = GrGLGpu::Make(std::move(glInterface), options, direct.get());
987 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500988 return nullptr;
989 }
Robert Phillipsf4f80112020-07-13 16:13:31 -0400990 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -0500991}
John Rosascoa9b348f2019-11-08 13:18:15 -0800992#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -0500993
Robert Phillipsf4f80112020-07-13 16:13:31 -0400994/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -0400995sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions) {
996 GrContextOptions defaultOptions;
997 return MakeMock(mockOptions, defaultOptions);
998}
999
1000sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions,
1001 const GrContextOptions& options) {
1002 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMock, options));
1003
1004 direct->fGpu = GrMockGpu::Make(mockOptions, options, direct.get());
1005 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001006 return nullptr;
1007 }
Chris Daltona378b452019-12-11 13:24:11 -05001008
Robert Phillipsf4f80112020-07-13 16:13:31 -04001009 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001010}
1011
Greg Danielb4d89562018-10-03 18:44:49 +00001012#ifdef SK_VULKAN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001013/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001014sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext) {
1015 GrContextOptions defaultOptions;
1016 return MakeVulkan(backendContext, defaultOptions);
1017}
1018
1019sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext,
1020 const GrContextOptions& options) {
1021 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kVulkan, options));
1022
1023 direct->fGpu = GrVkGpu::Make(backendContext, options, direct.get());
1024 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001025 return nullptr;
1026 }
1027
Robert Phillipsf4f80112020-07-13 16:13:31 -04001028 return direct;
Greg Danielb4d89562018-10-03 18:44:49 +00001029}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001030#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001031
1032#ifdef SK_METAL
Robert Phillipsf4f80112020-07-13 16:13:31 -04001033/*************************************************************************************************/
Jim Van Verth351c9b52020-11-12 15:21:11 -05001034sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001035 GrContextOptions defaultOptions;
Jim Van Verth351c9b52020-11-12 15:21:11 -05001036 return MakeMetal(backendContext, defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -05001037}
1038
Jim Van Verth351c9b52020-11-12 15:21:11 -05001039sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext,
1040 const GrContextOptions& options) {
Robert Phillipsf4f80112020-07-13 16:13:31 -04001041 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
Robert Phillipsa3457b82018-03-08 11:30:12 -05001042
Jim Van Verth351c9b52020-11-12 15:21:11 -05001043 direct->fGpu = GrMtlTrampoline::MakeGpu(backendContext, options, direct.get());
Robert Phillipsf4f80112020-07-13 16:13:31 -04001044 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001045 return nullptr;
1046 }
Timothy Liang4e85e802018-06-28 16:37:18 -04001047
Robert Phillipsf4f80112020-07-13 16:13:31 -04001048 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001049}
Jim Van Verth351c9b52020-11-12 15:21:11 -05001050
1051// deprecated
1052sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue) {
1053 GrContextOptions defaultOptions;
1054 return MakeMetal(device, queue, defaultOptions);
1055}
1056
1057// deprecated
1058// remove include/gpu/mtl/GrMtlBackendContext.h, above, when removed
1059sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue,
1060 const GrContextOptions& options) {
1061 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
1062 GrMtlBackendContext backendContext = {};
1063 backendContext.fDevice.reset(device);
1064 backendContext.fQueue.reset(queue);
1065
1066 return GrDirectContext::MakeMetal(backendContext, options);
1067}
Robert Phillipsa3457b82018-03-08 11:30:12 -05001068#endif
1069
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001070#ifdef SK_DIRECT3D
Robert Phillipsf4f80112020-07-13 16:13:31 -04001071/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001072sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext) {
1073 GrContextOptions defaultOptions;
1074 return MakeDirect3D(backendContext, defaultOptions);
1075}
1076
1077sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext,
1078 const GrContextOptions& options) {
1079 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDirect3D, options));
1080
1081 direct->fGpu = GrD3DGpu::Make(backendContext, options, direct.get());
1082 if (!direct->init()) {
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -05001083 return nullptr;
1084 }
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001085
Robert Phillipsf4f80112020-07-13 16:13:31 -04001086 return direct;
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001087}
1088#endif
1089
Stephen White985741a2019-07-18 11:43:45 -04001090#ifdef SK_DAWN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001091/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001092sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device) {
Stephen White985741a2019-07-18 11:43:45 -04001093 GrContextOptions defaultOptions;
1094 return MakeDawn(device, defaultOptions);
1095}
1096
Robert Phillipsf4f80112020-07-13 16:13:31 -04001097sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device,
1098 const GrContextOptions& options) {
1099 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDawn, options));
Stephen White985741a2019-07-18 11:43:45 -04001100
Robert Phillipsf4f80112020-07-13 16:13:31 -04001101 direct->fGpu = GrDawnGpu::Make(device, options, direct.get());
1102 if (!direct->init()) {
Stephen White985741a2019-07-18 11:43:45 -04001103 return nullptr;
1104 }
1105
Robert Phillipsf4f80112020-07-13 16:13:31 -04001106 return direct;
Stephen White985741a2019-07-18 11:43:45 -04001107}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001108
Stephen White985741a2019-07-18 11:43:45 -04001109#endif