blob: 4b0a8672da7e7f49597ccb9b18c178b1c0a45d70 [file] [log] [blame]
Robert Phillipsa3457b82018-03-08 11:30:12 -05001/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Greg Daniel54bfb182018-11-20 17:12:36 -05008
Robert Phillipsb7bfbc22020-07-01 12:55:01 -04009#include "include/gpu/GrDirectContext.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050010
Adlai Holler3acc69a2020-10-13 08:20:51 -040011#include "include/core/SkTraceMemoryDump.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050012#include "include/gpu/GrContextThreadSafeProxy.h"
Brian Salomon71283232021-04-08 12:45:58 -040013#include "src/core/SkAutoMalloc.h"
Adlai Holler9555f292020-10-09 09:41:14 -040014#include "src/core/SkTaskGroup.h"
Brian Salomon71283232021-04-08 12:45:58 -040015#include "src/gpu/GrBackendUtils.h"
Adlai Holler9555f292020-10-09 09:41:14 -040016#include "src/gpu/GrClientMappedBufferManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050017#include "src/gpu/GrContextThreadSafeProxyPriv.h"
Adlai Hollera0693042020-10-14 11:23:11 -040018#include "src/gpu/GrDirectContextPriv.h"
Adlai Holler4aa4c602020-10-12 13:58:52 -040019#include "src/gpu/GrDrawingManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050020#include "src/gpu/GrGpu.h"
Adlai Holler9555f292020-10-09 09:41:14 -040021#include "src/gpu/GrResourceProvider.h"
22#include "src/gpu/GrShaderUtils.h"
Brian Salomonea1d39b2021-04-01 17:06:52 -040023#include "src/gpu/GrSurfaceContext.h"
Adlai Holler4aa4c602020-10-12 13:58:52 -040024#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050025#include "src/gpu/effects/GrSkSLFP.h"
26#include "src/gpu/gl/GrGLGpu.h"
27#include "src/gpu/mock/GrMockGpu.h"
Robert Phillips5edf5102020-08-10 16:30:36 -040028#include "src/gpu/ops/GrSmallPathAtlasMgr.h"
Robert Phillipse19babf2020-04-06 13:57:30 -040029#include "src/gpu/text/GrAtlasManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050030#include "src/gpu/text/GrStrikeCache.h"
Brian Salomonea1d39b2021-04-01 17:06:52 -040031#include "src/image/SkImage_GpuBase.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050032#ifdef SK_METAL
Jim Van Verth351c9b52020-11-12 15:21:11 -050033#include "include/gpu/mtl/GrMtlBackendContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050034#include "src/gpu/mtl/GrMtlTrampoline.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050035#endif
36#ifdef SK_VULKAN
Mike Kleinc0bd9f92019-04-23 12:05:21 -050037#include "src/gpu/vk/GrVkGpu.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050038#endif
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -050039#ifdef SK_DIRECT3D
40#include "src/gpu/d3d/GrD3DGpu.h"
41#endif
Stephen White985741a2019-07-18 11:43:45 -040042#ifdef SK_DAWN
Mike Klein52337de2019-07-25 09:00:52 -050043#include "src/gpu/dawn/GrDawnGpu.h"
Stephen White985741a2019-07-18 11:43:45 -040044#endif
Adlai Holler6d0745b2020-10-13 13:29:00 -040045#include <memory>
Robert Phillipsa3457b82018-03-08 11:30:12 -050046
Brian Salomon24069eb2020-06-24 10:19:52 -040047#if GR_TEST_UTILS
48# include "include/utils/SkRandom.h"
49# if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
50# include <sanitizer/lsan_interface.h>
51# endif
52#endif
53
Adlai Holler9555f292020-10-09 09:41:14 -040054#define ASSERT_SINGLE_OWNER GR_ASSERT_SINGLE_OWNER(this->singleOwner())
55
Robert Phillipse7a959d2021-03-11 14:44:42 -050056GrDirectContext::DirectContextID GrDirectContext::DirectContextID::Next() {
Robert Phillipsedff4672021-03-11 09:16:25 -050057 static std::atomic<uint32_t> nextID{1};
58 uint32_t id;
59 do {
60 id = nextID.fetch_add(1, std::memory_order_relaxed);
61 } while (id == SK_InvalidUniqueID);
62 return DirectContextID(id);
63}
64
// Constructs a direct context for the given backend API, creating the shared
// thread-safe proxy up front. The trailing 'false' is forwarded to the base
// GrRecordingContext ctor — presumably it marks this as a non-DDL context;
// confirm against the base class declaration.
GrDirectContext::GrDirectContext(GrBackendApi backend, const GrContextOptions& options)
        : INHERITED(GrContextThreadSafeProxyPriv::Make(backend, options), false)
        , fDirectContextID(DirectContextID::Next()) {
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050069
// Tears the context down in a strict order: flush pending work, wait for the
// GPU to finish, destroy the drawing manager, release cached resources, and
// only then drop the mapped-buffer manager. Reordering these steps is unsafe.
GrDirectContext::~GrDirectContext() {
    ASSERT_SINGLE_OWNER
    // this if-test protects against the case where the context is being destroyed
    // before having been fully created
    if (fGpu) {
        this->flushAndSubmit();
    }

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/false);

    this->destroyDrawingManager();

    // Ideally we could just let the ptr drop, but resource cache queries this ptr in releaseAll.
    if (fResourceCache) {
        fResourceCache->releaseAll();
    }
    // This has to be after GrResourceCache::releaseAll so that other threads that are holding
    // async pixel result don't try to destroy buffers off thread.
    fMappedBufferManager.reset();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050091
// Returns the context's thread-safe proxy; pure forwarder to the base class,
// re-exposed here as part of GrDirectContext's public interface.
sk_sp<GrContextThreadSafeProxy> GrDirectContext::threadSafeProxy() {
    return INHERITED::threadSafeProxy();
}
95
Adlai Hollera7a40442020-10-09 09:49:42 -040096void GrDirectContext::resetGLTextureBindings() {
97 if (this->abandoned() || this->backend() != GrBackendApi::kOpenGL) {
98 return;
99 }
100 fGpu->resetTextureBindings();
101}
102
// Notifies the backend that 3D API state may have been changed outside of Skia;
// 'state' is forwarded as the dirty-state mask to GrGpu::markContextDirty.
void GrDirectContext::resetContext(uint32_t state) {
    ASSERT_SINGLE_OWNER
    fGpu->markContextDirty(state);
}
107
// Abandons the context: no further GPU work will be issued and backend objects
// are NOT freed through the 3D API (compare releaseResourcesAndAbandonContext).
// The teardown order below is deliberate; do not reorder.
void GrDirectContext::abandonContext() {
    // Idempotent: a second call is a no-op.
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(this->caps()->mustSyncGpuDuringAbandon());

    fStrikeCache->freeAll();

    fMappedBufferManager->abandon();

    fResourceProvider->abandon();

    // abandon first so destructors don't try to free the resources in the API.
    fResourceCache->abandonAll();

    fGpu->disconnect(GrGpu::DisconnectType::kAbandon);

    // Must be after GrResourceCache::abandonAll().
    fMappedBufferManager.reset();

    // The small-path atlas manager is created lazily, so may not exist yet.
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500137
Adlai Hollera7a40442020-10-09 09:49:42 -0400138bool GrDirectContext::abandoned() {
139 if (INHERITED::abandoned()) {
140 return true;
141 }
142
143 if (fGpu && fGpu->isDeviceLost()) {
144 this->abandonContext();
145 return true;
146 }
147 return false;
148}
149
Adlai Holler61a591c2020-10-12 12:38:33 -0400150bool GrDirectContext::oomed() { return fGpu ? fGpu->checkAndResetOOMed() : false; }
151
// Like abandonContext(), but additionally frees all backend objects through the
// 3D API before disconnecting (DisconnectType::kCleanup vs kAbandon).
// The teardown order below is deliberate; do not reorder.
void GrDirectContext::releaseResourcesAndAbandonContext() {
    // Idempotent: a second call is a no-op.
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/true);

    fResourceProvider->abandon();

    // Release all resources in the backend 3D API.
    fResourceCache->releaseAll();

    // Must be after GrResourceCache::releaseAll().
    fMappedBufferManager.reset();

    fGpu->disconnect(GrGpu::DisconnectType::kCleanup);
    // The small-path atlas manager is created lazily, so may not exist yet.
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillips6db27c22019-05-01 10:43:56 -0400176
// Flushes pending work and then frees every GPU resource this context holds
// that is not currently in use. The context remains usable afterwards.
void GrDirectContext::freeGpuResources() {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    // Flush first so in-flight work does not pin resources we want to free.
    this->flushAndSubmit();
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();

    // TODO: the glyph cache doesn't hold any GpuResources so this call should not be needed here.
    // Some slack in the GrTextBlob's implementation requires it though. That could be fixed.
    fStrikeCache->freeAll();

    this->drawingManager()->freeGpuResources();

    fResourceCache->purgeAllUnlocked();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500198
// Second-phase initialization, run after the backend GrGpu has been created.
// Builds the caches, resource provider/cache, task group, shader-error handler
// and glyph atlas manager. Returns false if the context cannot be initialized.
bool GrDirectContext::init() {
    ASSERT_SINGLE_OWNER
    // No backend GPU means context construction failed earlier.
    if (!fGpu) {
        return false;
    }

    // The thread-safe proxy must see the real caps before the base init runs.
    fThreadSafeProxy->priv().init(fGpu->refCaps(), fGpu->refPipelineBuilder());
    if (!INHERITED::init()) {
        return false;
    }

    SkASSERT(this->getTextBlobCache());
    SkASSERT(this->threadSafeCache());

    fStrikeCache = std::make_unique<GrStrikeCache>();
    fResourceCache = std::make_unique<GrResourceCache>(this->singleOwner(),
                                                       this->directContextID(),
                                                       this->contextID());
    fResourceCache->setProxyProvider(this->proxyProvider());
    fResourceCache->setThreadSafeCache(this->threadSafeCache());
    fResourceProvider = std::make_unique<GrResourceProvider>(fGpu.get(), fResourceCache.get(),
                                                             this->singleOwner());
    fMappedBufferManager = std::make_unique<GrClientMappedBufferManager>(this->directContextID());

    fDidTestPMConversions = false;

    // DDL TODO: we need to think through how the task group & persistent cache
    // get passed on to/shared between all the DDLRecorders created with this context.
    if (this->options().fExecutor) {
        fTaskGroup = std::make_unique<SkTaskGroup>(*this->options().fExecutor);
    }

    fPersistentCache = this->options().fPersistentCache;
    fShaderErrorHandler = this->options().fShaderErrorHandler;
    // Fall back to the default handler so fShaderErrorHandler is never null.
    if (!fShaderErrorHandler) {
        fShaderErrorHandler = GrShaderUtils::DefaultShaderErrorHandler();
    }

    GrDrawOpAtlas::AllowMultitexturing allowMultitexturing;
    if (GrContextOptions::Enable::kNo == this->options().fAllowMultipleGlyphCacheTextures ||
        // multitexturing supported only if range can represent the index + texcoords fully
        !(this->caps()->shaderCaps()->floatIs32Bits() ||
          this->caps()->shaderCaps()->integerSupport())) {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kNo;
    } else {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kYes;
    }

    GrProxyProvider* proxyProvider = this->priv().proxyProvider();

    fAtlasManager = std::make_unique<GrAtlasManager>(proxyProvider,
                                                     this->options().fGlyphCacheTextureMaximumBytes,
                                                     allowMultitexturing);
    // The atlas manager participates in pre-flush work from now on.
    this->priv().addOnFlushCallbackObject(fAtlasManager.get());

    return true;
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500256
Adlai Holler3a508e92020-10-12 13:58:01 -0400257void GrDirectContext::getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const {
258 ASSERT_SINGLE_OWNER
259
260 if (resourceCount) {
261 *resourceCount = fResourceCache->getBudgetedResourceCount();
262 }
263 if (resourceBytes) {
264 *resourceBytes = fResourceCache->getBudgetedResourceBytes();
265 }
266}
267
268size_t GrDirectContext::getResourceCachePurgeableBytes() const {
269 ASSERT_SINGLE_OWNER
270 return fResourceCache->getPurgeableBytes();
271}
272
273void GrDirectContext::getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const {
274 ASSERT_SINGLE_OWNER
275 if (maxResources) {
276 *maxResources = -1;
277 }
278 if (maxResourceBytes) {
279 *maxResourceBytes = this->getResourceCacheLimit();
280 }
281}
282
283size_t GrDirectContext::getResourceCacheLimit() const {
284 ASSERT_SINGLE_OWNER
285 return fResourceCache->getMaxResourceBytes();
286}
287
// Legacy setter: the resource-count argument is ignored (hence 'unused');
// only the byte budget is forwarded to setResourceCacheLimit.
void GrDirectContext::setResourceCacheLimits(int unused, size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    this->setResourceCacheLimit(maxResourceBytes);
}
292
// Sets the byte budget of the GPU resource cache.
void GrDirectContext::setResourceCacheLimit(size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    fResourceCache->setLimit(maxResourceBytes);
}
297
// Purges unlocked resources from the cache (optionally only scratch ones),
// evicts stale text blobs, and lets the backend drop unlocked objects.
void GrDirectContext::purgeUnlockedResources(bool scratchResourcesOnly) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(scratchResourcesOnly);
    fResourceCache->purgeAsNeeded();

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();

    fGpu->releaseUnlockedBackendObjects();
}
314
// Periodic housekeeping: completes finished async work, processes returned
// mapped buffers, and purges resources that have not been used within the
// last 'msNotUsed' milliseconds, plus stale text blobs.
void GrDirectContext::performDeferredCleanup(std::chrono::milliseconds msNotUsed) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->checkAsyncWorkCompletion();
    fMappedBufferManager->process();
    // Anything last used before this time point is eligible for purging.
    auto purgeTime = GrStdSteadyClock::now() - msNotUsed;

    fResourceCache->purgeAsNeeded();
    fResourceCache->purgeResourcesNotUsedSince(purgeTime);

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
335
336void GrDirectContext::purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources) {
337 ASSERT_SINGLE_OWNER
338
339 if (this->abandoned()) {
340 return;
341 }
342
343 fResourceCache->purgeUnlockedResources(bytesToPurge, preferScratchResources);
344}
345
Adlai Holler3acc69a2020-10-13 08:20:51 -0400346////////////////////////////////////////////////////////////////////////////////
347bool GrDirectContext::wait(int numSemaphores, const GrBackendSemaphore waitSemaphores[],
348 bool deleteSemaphoresAfterWait) {
349 if (!fGpu || fGpu->caps()->semaphoreSupport()) {
350 return false;
351 }
352 GrWrapOwnership ownership =
353 deleteSemaphoresAfterWait ? kAdopt_GrWrapOwnership : kBorrow_GrWrapOwnership;
354 for (int i = 0; i < numSemaphores; ++i) {
355 std::unique_ptr<GrSemaphore> sema = fResourceProvider->wrapBackendSemaphore(
356 waitSemaphores[i], GrResourceProvider::SemaphoreWrapType::kWillWait, ownership);
357 // If we failed to wrap the semaphore it means the client didn't give us a valid semaphore
358 // to begin with. Therefore, it is fine to not wait on it.
359 if (sema) {
360 fGpu->waitSemaphore(sema.get());
361 }
362 }
363 return true;
364}
Adlai Holler4aa4c602020-10-12 13:58:52 -0400365
// Lazily creates the small-path atlas manager and registers it for on-flush
// callbacks; returns null if its atlas cannot be initialized.
GrSmallPathAtlasMgr* GrDirectContext::onGetSmallPathAtlasMgr() {
    if (!fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr = std::make_unique<GrSmallPathAtlasMgr>();

        this->priv().addOnFlushCallbackObject(fSmallPathAtlasMgr.get());
    }

    // initAtlas is (re)attempted on every call; failure leaves the manager
    // allocated but unusable for this request.
    if (!fSmallPathAtlasMgr->initAtlas(this->proxyProvider(), this->caps())) {
        return nullptr;
    }

    return fSmallPathAtlasMgr.get();
}
379
Adlai Holler3acc69a2020-10-13 08:20:51 -0400380////////////////////////////////////////////////////////////////////////////////
381
// Flushes all work to the backend. On an abandoned context the client's
// finished/submitted callbacks are still invoked (submitted with success=false)
// so callers never leak their callback contexts.
GrSemaphoresSubmitted GrDirectContext::flush(const GrFlushInfo& info) {
    ASSERT_SINGLE_OWNER
    if (this->abandoned()) {
        if (info.fFinishedProc) {
            info.fFinishedProc(info.fFinishedContext);
        }
        if (info.fSubmittedProc) {
            info.fSubmittedProc(info.fSubmittedContext, false);
        }
        return GrSemaphoresSubmitted::kNo;
    }

    // Empty proxy span => flush everything the drawing manager has pending.
    return this->drawingManager()->flushSurfaces({}, SkSurface::BackendSurfaceAccess::kNoAccess,
                                                 info, nullptr);
}
397
398bool GrDirectContext::submit(bool syncCpu) {
399 ASSERT_SINGLE_OWNER
400 if (this->abandoned()) {
401 return false;
402 }
403
404 if (!fGpu) {
405 return false;
406 }
407
408 return fGpu->submitToGpu(syncCpu);
409}
410
411////////////////////////////////////////////////////////////////////////////////
412
413void GrDirectContext::checkAsyncWorkCompletion() {
414 if (fGpu) {
415 fGpu->checkFinishProcs();
416 }
417}
418
Greg Daniela89b4302021-01-29 10:48:40 -0500419void GrDirectContext::syncAllOutstandingGpuWork(bool shouldExecuteWhileAbandoned) {
420 if (fGpu && (!this->abandoned() || shouldExecuteWhileAbandoned)) {
421 fGpu->finishOutstandingGpuWork();
422 this->checkAsyncWorkCompletion();
423 }
424}
425
Adlai Holler3acc69a2020-10-13 08:20:51 -0400426////////////////////////////////////////////////////////////////////////////////
427
428void GrDirectContext::storeVkPipelineCacheData() {
429 if (fGpu) {
430 fGpu->storeVkPipelineCacheData();
431 }
432}
433
434////////////////////////////////////////////////////////////////////////////////
435
436bool GrDirectContext::supportsDistanceFieldText() const {
437 return this->caps()->shaderCaps()->supportsDistanceFieldText();
438}
439
440//////////////////////////////////////////////////////////////////////////////
441
// Writes resource-cache memory statistics into the client's dump, plus a
// numeric entry for the text blob cache's current byte usage.
void GrDirectContext::dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const {
    ASSERT_SINGLE_OWNER
    fResourceCache->dumpMemoryStatistics(traceMemoryDump);
    traceMemoryDump->dumpNumericValue("skia/gr_text_blob_cache", "size", "bytes",
                                      this->getTextBlobCache()->usedBytes());
}
448
// Creates an uninitialized backend texture with the given format. Returns an
// invalid GrBackendTexture if the context is abandoned or creation fails.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    return fGpu->createBackendTexture({width, height}, backendFormat, renderable,
                                      mipMapped, isProtected);
}
462
463GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
464 SkColorType skColorType,
465 GrMipmapped mipMapped,
466 GrRenderable renderable,
467 GrProtected isProtected) {
468 if (this->abandoned()) {
469 return GrBackendTexture();
470 }
471
472 const GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
473
474 return this->createBackendTexture(width, height, format, mipMapped, renderable, isProtected);
475}
476
Brian Salomon71283232021-04-08 12:45:58 -0400477static GrBackendTexture create_and_clear_backend_texture(GrDirectContext* dContext,
478 SkISize dimensions,
479 const GrBackendFormat& backendFormat,
480 GrMipmapped mipMapped,
481 GrRenderable renderable,
482 GrProtected isProtected,
483 sk_sp<GrRefCntedCallback> finishedCallback,
484 std::array<float, 4> color) {
Adlai Holler98dd0042020-10-13 10:04:00 -0400485 GrGpu* gpu = dContext->priv().getGpu();
Adlai Holler98dd0042020-10-13 10:04:00 -0400486 GrBackendTexture beTex = gpu->createBackendTexture(dimensions, backendFormat, renderable,
487 mipMapped, isProtected);
488 if (!beTex.isValid()) {
489 return {};
490 }
491
Brian Salomon71283232021-04-08 12:45:58 -0400492 if (!dContext->priv().getGpu()->clearBackendTexture(beTex,
493 std::move(finishedCallback),
494 color)) {
Adlai Holler98dd0042020-10-13 10:04:00 -0400495 dContext->deleteBackendTexture(beTex);
496 return {};
497 }
498 return beTex;
499}
500
// Uploads 'numLevels' pixmaps (base plus mips) into an existing backend
// texture via a temporary borrowed proxy, then flushes so the upload is
// recorded before the texture is handed back to the client. Returns false if
// the pixmaps' color type is incompatible with the texture's format, the
// texture cannot be wrapped, or the write fails.
static bool update_texture_with_pixmaps(GrDirectContext* context,
                                        const SkPixmap src[],
                                        int numLevels,
                                        const GrBackendTexture& backendTexture,
                                        GrSurfaceOrigin textureOrigin,
                                        sk_sp<GrRefCntedCallback> finishedCallback) {
    GrColorType ct = SkColorTypeToGrColorType(src[0].colorType());
    const GrBackendFormat& format = backendTexture.getBackendFormat();

    if (!context->priv().caps()->areColorTypeAndFormatCompatible(ct, format)) {
        return false;
    }

    // Borrowed wrap: the client keeps ownership of the backend texture.
    auto proxy = context->priv().proxyProvider()->wrapBackendTexture(backendTexture,
                                                                     kBorrow_GrWrapOwnership,
                                                                     GrWrapCacheable::kNo,
                                                                     kRW_GrIOType,
                                                                     std::move(finishedCallback));
    if (!proxy) {
        return false;
    }

    GrSwizzle swizzle = context->priv().caps()->getReadSwizzle(format, ct);
    GrSurfaceProxyView view(std::move(proxy), textureOrigin, swizzle);
    GrSurfaceContext surfaceContext(context, std::move(view), src[0].info().colorInfo());
    // Repackage the SkPixmaps as GrCPixmaps for the write call.
    SkAutoSTArray<15, GrCPixmap> tmpSrc(numLevels);
    for (int i = 0; i < numLevels; ++i) {
        tmpSrc[i] = src[i];
    }
    if (!surfaceContext.writePixels(context, tmpSrc.get(), numLevels)) {
        return false;
    }

    // Flush just this proxy so the upload is submitted to the backend.
    GrSurfaceProxy* p = surfaceContext.asSurfaceProxy();
    GrFlushInfo info;
    context->priv().drawingManager()->flushSurfaces({&p, 1},
                                                    SkSurface::BackendSurfaceAccess::kNoAccess,
                                                    info,
                                                    nullptr);
    return true;
}
542
// Creates a backend texture with the given format, cleared to 'color'.
// The finished proc is wrapped before the abandoned-check so the client's
// context is managed by the callback object even on the early return.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return {};
    }

    return create_and_clear_backend_texture(this,
                                            {width, height},
                                            backendFormat,
                                            mipMapped,
                                            renderable,
                                            isProtected,
                                            std::move(finishedCallback),
                                            color.array());
}
567
// Creates a backend texture for the given SkColorType, cleared to 'color'.
// The clear color is pre-swizzled with the format's write swizzle so the
// raw clear produces the intended logical color.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
    if (!format.isValid()) {
        return {};
    }

    GrColorType grColorType = SkColorTypeToGrColorType(skColorType);
    SkColor4f swizzledColor = this->caps()->getWriteSwizzle(format, grColorType).applyTo(color);

    return create_and_clear_backend_texture(this,
                                            {width, height},
                                            format,
                                            mipMapped,
                                            renderable,
                                            isProtected,
                                            std::move(finishedCallback),
                                            swizzledColor.array());
}
599
// Creates a backend texture sized and typed after srcData[0] and uploads the
// provided level data. More than one level implies a mipmapped texture. On
// upload failure the freshly created texture is deleted before returning an
// invalid handle.
GrBackendTexture GrDirectContext::createBackendTexture(const SkPixmap srcData[],
                                                       int numProvidedLevels,
                                                       GrSurfaceOrigin textureOrigin,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    if (!srcData || numProvidedLevels <= 0) {
        return {};
    }

    SkColorType colorType = srcData[0].colorType();

    GrMipmapped mipMapped = GrMipmapped::kNo;
    if (numProvidedLevels > 1) {
        mipMapped = GrMipmapped::kYes;
    }

    GrBackendFormat backendFormat = this->defaultBackendFormat(colorType, renderable);
    GrBackendTexture beTex = this->createBackendTexture(srcData[0].width(),
                                                        srcData[0].height(),
                                                        backendFormat,
                                                        mipMapped,
                                                        renderable,
                                                        isProtected);
    if (!beTex.isValid()) {
        return {};
    }
    if (!update_texture_with_pixmaps(this,
                                     srcData,
                                     numProvidedLevels,
                                     beTex,
                                     textureOrigin,
                                     std::move(finishedCallback))) {
        // Don't leak the texture if the upload fails.
        this->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
647
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400648bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
649 const SkColor4f& color,
650 GrGpuFinishedProc finishedProc,
651 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500652 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400653
654 if (this->abandoned()) {
655 return false;
656 }
657
Brian Salomon71283232021-04-08 12:45:58 -0400658 return fGpu->clearBackendTexture(backendTexture, std::move(finishedCallback), color.array());
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400659}
660
// Clears an existing backend texture to 'color', interpreting the texture via
// 'skColorType'. The color is pre-swizzled with the format's write swizzle.
// Returns false if the color type and format are incompatible.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           SkColorType skColorType,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrBackendFormat format = backendTexture.getBackendFormat();
    GrColorType grColorType = SkColorTypeAndFormatToGrColorType(this->caps(), skColorType, format);

    if (!this->caps()->areColorTypeAndFormatCompatible(grColorType, format)) {
        return false;
    }

    GrSwizzle swizzle = this->caps()->getWriteSwizzle(format, grColorType);
    SkColor4f swizzledColor = swizzle.applyTo(color);

    return fGpu->clearBackendTexture(backendTexture,
                                     std::move(finishedCallback),
                                     swizzledColor.array());
}
686
// Re-uploads pixel data into an existing backend texture. For a mipmapped
// texture the caller must supply the complete level chain; otherwise exactly
// one level. Returns false on a level-count mismatch or upload failure.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkPixmap srcData[],
                                           int numLevels,
                                           GrSurfaceOrigin textureOrigin,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    if (!srcData || numLevels <= 0) {
        return false;
    }

    // If the texture has MIP levels then we require that the full set is overwritten.
    int numExpectedLevels = 1;
    if (backendTexture.hasMipmaps()) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(backendTexture.width(),
                                                        backendTexture.height()) + 1;
    }
    if (numLevels != numExpectedLevels) {
        return false;
    }
    return update_texture_with_pixmaps(this,
                                       srcData,
                                       numLevels,
                                       backendTexture,
                                       textureOrigin,
                                       std::move(finishedCallback));
}
719
Adlai Holler64e13832020-10-13 08:21:56 -0400720//////////////////////////////////////////////////////////////////////////////
721
722static GrBackendTexture create_and_update_compressed_backend_texture(
723 GrDirectContext* dContext,
724 SkISize dimensions,
725 const GrBackendFormat& backendFormat,
726 GrMipmapped mipMapped,
727 GrProtected isProtected,
728 sk_sp<GrRefCntedCallback> finishedCallback,
Brian Salomon71283232021-04-08 12:45:58 -0400729 const void* data,
730 size_t size) {
Adlai Holler64e13832020-10-13 08:21:56 -0400731 GrGpu* gpu = dContext->priv().getGpu();
732
733 GrBackendTexture beTex = gpu->createCompressedBackendTexture(dimensions, backendFormat,
734 mipMapped, isProtected);
735 if (!beTex.isValid()) {
736 return {};
737 }
738
739 if (!dContext->priv().getGpu()->updateCompressedBackendTexture(
Brian Salomon71283232021-04-08 12:45:58 -0400740 beTex, std::move(finishedCallback), data, size)) {
Adlai Holler64e13832020-10-13 08:21:56 -0400741 dContext->deleteBackendTexture(beTex);
742 return {};
743 }
744 return beTex;
745}
746
// Creates a compressed backend texture filled with 'color': the color is
// expanded on the CPU into the format's compressed encoding (all requested
// mip levels) and then uploaded. Fails for non-compressed formats.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(
        int width, int height,
        const GrBackendFormat& backendFormat,
        const SkColor4f& color,
        GrMipmapped mipmapped,
        GrProtected isProtected,
        GrGpuFinishedProc finishedProc,
        GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    SkImage::CompressionType compression = GrBackendFormatToCompressionType(backendFormat);
    if (compression == SkImage::CompressionType::kNone) {
        return {};
    }

    // CPU-side staging buffer holding the compressed fill data.
    size_t size = SkCompressedDataSize(compression,
                                       {width, height},
                                       nullptr,
                                       mipmapped == GrMipmapped::kYes);
    auto storage = std::make_unique<char[]>(size);
    GrFillInCompressedData(compression, {width, height}, mipmapped, storage.get(), color);
    return create_and_update_compressed_backend_texture(this,
                                                        {width, height},
                                                        backendFormat,
                                                        mipmapped,
                                                        isProtected,
                                                        std::move(finishedCallback),
                                                        storage.get(),
                                                        size);
}
782
Brian Salomon71283232021-04-08 12:45:58 -0400783GrBackendTexture GrDirectContext::createCompressedBackendTexture(
784 int width, int height,
785 SkImage::CompressionType compression,
786 const SkColor4f& color,
787 GrMipmapped mipMapped,
788 GrProtected isProtected,
789 GrGpuFinishedProc finishedProc,
790 GrGpuFinishedContext finishedContext) {
Adlai Holler64e13832020-10-13 08:21:56 -0400791 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
792 GrBackendFormat format = this->compressedBackendFormat(compression);
793 return this->createCompressedBackendTexture(width, height, format, color,
794 mipMapped, isProtected, finishedProc,
795 finishedContext);
796}
797
Brian Salomon71283232021-04-08 12:45:58 -0400798GrBackendTexture GrDirectContext::createCompressedBackendTexture(
799 int width, int height,
800 const GrBackendFormat& backendFormat,
801 const void* compressedData,
802 size_t dataSize,
803 GrMipmapped mipMapped,
804 GrProtected isProtected,
805 GrGpuFinishedProc finishedProc,
806 GrGpuFinishedContext finishedContext) {
Adlai Holler64e13832020-10-13 08:21:56 -0400807 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
Brian Salomon694ff172020-11-04 16:54:28 -0500808 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400809
810 if (this->abandoned()) {
811 return {};
812 }
813
Brian Salomon71283232021-04-08 12:45:58 -0400814 return create_and_update_compressed_backend_texture(this,
815 {width, height},
816 backendFormat,
817 mipMapped,
818 isProtected,
819 std::move(finishedCallback),
820 compressedData,
821 dataSize);
Adlai Holler64e13832020-10-13 08:21:56 -0400822}
823
Brian Salomon71283232021-04-08 12:45:58 -0400824GrBackendTexture GrDirectContext::createCompressedBackendTexture(
825 int width, int height,
826 SkImage::CompressionType compression,
827 const void* data, size_t dataSize,
828 GrMipmapped mipMapped,
829 GrProtected isProtected,
830 GrGpuFinishedProc finishedProc,
831 GrGpuFinishedContext finishedContext) {
Adlai Holler64e13832020-10-13 08:21:56 -0400832 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
833 GrBackendFormat format = this->compressedBackendFormat(compression);
834 return this->createCompressedBackendTexture(width, height, format, data, dataSize, mipMapped,
835 isProtected, finishedProc, finishedContext);
836}
837
838bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
839 const SkColor4f& color,
840 GrGpuFinishedProc finishedProc,
841 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500842 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400843
844 if (this->abandoned()) {
845 return false;
846 }
847
Brian Salomon71283232021-04-08 12:45:58 -0400848 SkImage::CompressionType compression =
849 GrBackendFormatToCompressionType(backendTexture.getBackendFormat());
850 if (compression == SkImage::CompressionType::kNone) {
851 return {};
852 }
853 size_t size = SkCompressedDataSize(compression,
854 backendTexture.dimensions(),
855 nullptr,
856 backendTexture.hasMipmaps());
857 SkAutoMalloc storage(size);
858 GrFillInCompressedData(compression,
859 backendTexture.dimensions(),
860 backendTexture.mipmapped(),
861 static_cast<char*>(storage.get()),
862 color);
863 return fGpu->updateCompressedBackendTexture(backendTexture,
864 std::move(finishedCallback),
865 storage.get(),
866 size);
Adlai Holler64e13832020-10-13 08:21:56 -0400867}
868
869bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
870 const void* compressedData,
871 size_t dataSize,
872 GrGpuFinishedProc finishedProc,
873 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500874 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400875
876 if (this->abandoned()) {
877 return false;
878 }
879
880 if (!compressedData) {
881 return false;
882 }
883
Brian Salomon71283232021-04-08 12:45:58 -0400884 return fGpu->updateCompressedBackendTexture(backendTexture,
885 std::move(finishedCallback),
886 compressedData,
887 dataSize);
Adlai Holler64e13832020-10-13 08:21:56 -0400888}
889
Adlai Holler6d0745b2020-10-13 13:29:00 -0400890//////////////////////////////////////////////////////////////////////////////
891
892bool GrDirectContext::setBackendTextureState(const GrBackendTexture& backendTexture,
893 const GrBackendSurfaceMutableState& state,
894 GrBackendSurfaceMutableState* previousState,
895 GrGpuFinishedProc finishedProc,
896 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500897 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400898
899 if (this->abandoned()) {
900 return false;
901 }
902
903 return fGpu->setBackendTextureState(backendTexture, state, previousState, std::move(callback));
904}
905
906
907bool GrDirectContext::setBackendRenderTargetState(const GrBackendRenderTarget& backendRenderTarget,
908 const GrBackendSurfaceMutableState& state,
909 GrBackendSurfaceMutableState* previousState,
910 GrGpuFinishedProc finishedProc,
911 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500912 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400913
914 if (this->abandoned()) {
915 return false;
916 }
917
918 return fGpu->setBackendRenderTargetState(backendRenderTarget, state, previousState,
919 std::move(callback));
920}
921
922void GrDirectContext::deleteBackendTexture(GrBackendTexture backendTex) {
923 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
924 // For the Vulkan backend we still must destroy the backend texture when the context is
925 // abandoned.
926 if ((this->abandoned() && this->backend() != GrBackendApi::kVulkan) || !backendTex.isValid()) {
927 return;
928 }
929
930 fGpu->deleteBackendTexture(backendTex);
931}
932
933//////////////////////////////////////////////////////////////////////////////
934
// Hands a (key, data) shader pair to the backend GrGpu for ahead-of-time compilation.
// Returns whatever the backend reports; the pair's provenance/format is backend-defined.
bool GrDirectContext::precompileShader(const SkData& key, const SkData& data) {
    return fGpu->precompileShader(key, data);
}
938
939#ifdef SK_ENABLE_DUMP_GPU
940#include "include/core/SkString.h"
941#include "src/utils/SkJSONWriter.h"
942SkString GrDirectContext::dump() const {
943 SkDynamicMemoryWStream stream;
944 SkJSONWriter writer(&stream, SkJSONWriter::Mode::kPretty);
945 writer.beginObject();
946
947 writer.appendString("backend", GrBackendApiToStr(this->backend()));
948
949 writer.appendName("caps");
950 this->caps()->dumpJSON(&writer);
951
952 writer.appendName("gpu");
953 this->fGpu->dumpJSON(&writer);
954
955 writer.appendName("context");
956 this->dumpJSON(&writer);
957
958 // Flush JSON to the memory stream
959 writer.endObject();
960 writer.flush();
961
962 // Null terminate the JSON data in the memory stream
963 stream.write8(0);
964
965 // Allocate a string big enough to hold all the data, then copy out of the stream
966 SkString result(stream.bytesWritten());
967 stream.copyToAndReset(result.writable_str());
968 return result;
969}
970#endif
971
John Rosascoa9b348f2019-11-08 13:18:15 -0800972#ifdef SK_GL
Robert Phillipsc7228c62020-07-14 12:57:39 -0400973
Robert Phillipsf4f80112020-07-13 16:13:31 -0400974/*************************************************************************************************/
975sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500976 GrContextOptions defaultOptions;
Jim Van Verth03b8ab22020-02-24 11:36:15 -0500977 return MakeGL(std::move(glInterface), defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -0500978}
979
// Creates a GL-backed context with |options|, using the native GL interface
// (nullptr is resolved by the full MakeGL overload / GrGLGpu::Make).
sk_sp<GrDirectContext> GrDirectContext::MakeGL(const GrContextOptions& options) {
    return MakeGL(nullptr, options);
}
983
Robert Phillipsf4f80112020-07-13 16:13:31 -0400984sk_sp<GrDirectContext> GrDirectContext::MakeGL() {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400985 GrContextOptions defaultOptions;
986 return MakeGL(nullptr, defaultOptions);
987}
988
Brian Salomon24069eb2020-06-24 10:19:52 -0400989#if GR_TEST_UTILS
// Test-only helper: returns a glGetError wrapper that forwards to |original| but, on
// roughly 1-in-300 otherwise-error-free calls, reports GR_GL_OUT_OF_MEMORY so OOM
// handling paths can be exercised.
GrGLFunction<GrGLGetErrorFn> make_get_error_with_random_oom(GrGLFunction<GrGLGetErrorFn> original) {
    // A SkRandom and a GrGLFunction<GrGLGetErrorFn> are too big to be captured by a
    // GrGLFunction<GrGLGetError> (surprise, surprise). So we make a context object and
    // capture that by pointer. However, GrGLFunction doesn't support calling a destructor
    // on the thing it captures. So we leak the context.
    struct GetErrorContext {
        SkRandom fRandom;
        GrGLFunction<GrGLGetErrorFn> fGetError;
    };

    auto errorContext = new GetErrorContext;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
    // The leak above is intentional; tell LSAN to ignore this allocation.
    __lsan_ignore_object(errorContext);
#endif

    errorContext->fGetError = original;

    return GrGLFunction<GrGLGetErrorFn>([errorContext]() {
        GrGLenum error = errorContext->fGetError();
        // Only inject OOM when the real call succeeded, so genuine errors pass through.
        if (error == GR_GL_NO_ERROR && (errorContext->fRandom.nextU() % 300) == 0) {
            error = GR_GL_OUT_OF_MEMORY;
        }
        return error;
    });
}
1016#endif
1017
Robert Phillipsf4f80112020-07-13 16:13:31 -04001018sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface,
1019 const GrContextOptions& options) {
1020 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kOpenGL, options));
Brian Salomon24069eb2020-06-24 10:19:52 -04001021#if GR_TEST_UTILS
1022 if (options.fRandomGLOOM) {
1023 auto copy = sk_make_sp<GrGLInterface>(*glInterface);
1024 copy->fFunctions.fGetError =
1025 make_get_error_with_random_oom(glInterface->fFunctions.fGetError);
1026#if GR_GL_CHECK_ERROR
1027 // Suppress logging GL errors since we'll be synthetically generating them.
1028 copy->suppressErrorLogging();
1029#endif
1030 glInterface = std::move(copy);
1031 }
1032#endif
Robert Phillipsf4f80112020-07-13 16:13:31 -04001033 direct->fGpu = GrGLGpu::Make(std::move(glInterface), options, direct.get());
1034 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001035 return nullptr;
1036 }
Robert Phillipsf4f80112020-07-13 16:13:31 -04001037 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001038}
John Rosascoa9b348f2019-11-08 13:18:15 -08001039#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001040
Robert Phillipsf4f80112020-07-13 16:13:31 -04001041/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001042sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions) {
1043 GrContextOptions defaultOptions;
1044 return MakeMock(mockOptions, defaultOptions);
1045}
1046
1047sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions,
1048 const GrContextOptions& options) {
1049 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMock, options));
1050
1051 direct->fGpu = GrMockGpu::Make(mockOptions, options, direct.get());
1052 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001053 return nullptr;
1054 }
Chris Daltona378b452019-12-11 13:24:11 -05001055
Robert Phillipsf4f80112020-07-13 16:13:31 -04001056 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001057}
1058
Greg Danielb4d89562018-10-03 18:44:49 +00001059#ifdef SK_VULKAN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001060/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001061sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext) {
1062 GrContextOptions defaultOptions;
1063 return MakeVulkan(backendContext, defaultOptions);
1064}
1065
1066sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext,
1067 const GrContextOptions& options) {
1068 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kVulkan, options));
1069
1070 direct->fGpu = GrVkGpu::Make(backendContext, options, direct.get());
1071 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001072 return nullptr;
1073 }
1074
Robert Phillipsf4f80112020-07-13 16:13:31 -04001075 return direct;
Greg Danielb4d89562018-10-03 18:44:49 +00001076}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001077#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001078
1079#ifdef SK_METAL
Robert Phillipsf4f80112020-07-13 16:13:31 -04001080/*************************************************************************************************/
Jim Van Verth351c9b52020-11-12 15:21:11 -05001081sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001082 GrContextOptions defaultOptions;
Jim Van Verth351c9b52020-11-12 15:21:11 -05001083 return MakeMetal(backendContext, defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -05001084}
1085
Jim Van Verth351c9b52020-11-12 15:21:11 -05001086sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext,
1087 const GrContextOptions& options) {
Robert Phillipsf4f80112020-07-13 16:13:31 -04001088 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
Robert Phillipsa3457b82018-03-08 11:30:12 -05001089
Jim Van Verth351c9b52020-11-12 15:21:11 -05001090 direct->fGpu = GrMtlTrampoline::MakeGpu(backendContext, options, direct.get());
Robert Phillipsf4f80112020-07-13 16:13:31 -04001091 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001092 return nullptr;
1093 }
Timothy Liang4e85e802018-06-28 16:37:18 -04001094
Robert Phillipsf4f80112020-07-13 16:13:31 -04001095 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001096}
Jim Van Verth351c9b52020-11-12 15:21:11 -05001097
1098// deprecated
1099sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue) {
1100 GrContextOptions defaultOptions;
1101 return MakeMetal(device, queue, defaultOptions);
1102}
1103
1104// deprecated
1105// remove include/gpu/mtl/GrMtlBackendContext.h, above, when removed
1106sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue,
1107 const GrContextOptions& options) {
1108 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
1109 GrMtlBackendContext backendContext = {};
1110 backendContext.fDevice.reset(device);
1111 backendContext.fQueue.reset(queue);
1112
1113 return GrDirectContext::MakeMetal(backendContext, options);
1114}
Robert Phillipsa3457b82018-03-08 11:30:12 -05001115#endif
1116
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001117#ifdef SK_DIRECT3D
Robert Phillipsf4f80112020-07-13 16:13:31 -04001118/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001119sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext) {
1120 GrContextOptions defaultOptions;
1121 return MakeDirect3D(backendContext, defaultOptions);
1122}
1123
1124sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext,
1125 const GrContextOptions& options) {
1126 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDirect3D, options));
1127
1128 direct->fGpu = GrD3DGpu::Make(backendContext, options, direct.get());
1129 if (!direct->init()) {
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -05001130 return nullptr;
1131 }
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001132
Robert Phillipsf4f80112020-07-13 16:13:31 -04001133 return direct;
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001134}
1135#endif
1136
Stephen White985741a2019-07-18 11:43:45 -04001137#ifdef SK_DAWN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001138/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001139sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device) {
Stephen White985741a2019-07-18 11:43:45 -04001140 GrContextOptions defaultOptions;
1141 return MakeDawn(device, defaultOptions);
1142}
1143
Robert Phillipsf4f80112020-07-13 16:13:31 -04001144sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device,
1145 const GrContextOptions& options) {
1146 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDawn, options));
Stephen White985741a2019-07-18 11:43:45 -04001147
Robert Phillipsf4f80112020-07-13 16:13:31 -04001148 direct->fGpu = GrDawnGpu::Make(device, options, direct.get());
1149 if (!direct->init()) {
Stephen White985741a2019-07-18 11:43:45 -04001150 return nullptr;
1151 }
1152
Robert Phillipsf4f80112020-07-13 16:13:31 -04001153 return direct;
Stephen White985741a2019-07-18 11:43:45 -04001154}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001155
Stephen White985741a2019-07-18 11:43:45 -04001156#endif