blob: f402df0c786850c6609c1af4302ec81add1594e7 [file] [log] [blame]
Robert Phillipsa3457b82018-03-08 11:30:12 -05001/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Greg Daniel54bfb182018-11-20 17:12:36 -05008
Robert Phillipsb7bfbc22020-07-01 12:55:01 -04009#include "include/gpu/GrDirectContext.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050010
Adlai Holler3acc69a2020-10-13 08:20:51 -040011#include "include/core/SkTraceMemoryDump.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050012#include "include/gpu/GrContextThreadSafeProxy.h"
Brian Salomon71283232021-04-08 12:45:58 -040013#include "src/core/SkAutoMalloc.h"
Adlai Holler9555f292020-10-09 09:41:14 -040014#include "src/core/SkTaskGroup.h"
Brian Salomon71283232021-04-08 12:45:58 -040015#include "src/gpu/GrBackendUtils.h"
Adlai Holler9555f292020-10-09 09:41:14 -040016#include "src/gpu/GrClientMappedBufferManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050017#include "src/gpu/GrContextThreadSafeProxyPriv.h"
Adlai Hollera0693042020-10-14 11:23:11 -040018#include "src/gpu/GrDirectContextPriv.h"
Adlai Holler4aa4c602020-10-12 13:58:52 -040019#include "src/gpu/GrDrawingManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050020#include "src/gpu/GrGpu.h"
Adlai Holler9555f292020-10-09 09:41:14 -040021#include "src/gpu/GrResourceProvider.h"
22#include "src/gpu/GrShaderUtils.h"
Brian Salomonea1d39b2021-04-01 17:06:52 -040023#include "src/gpu/GrSurfaceContext.h"
Adlai Holler4aa4c602020-10-12 13:58:52 -040024#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050025#include "src/gpu/effects/GrSkSLFP.h"
26#include "src/gpu/gl/GrGLGpu.h"
27#include "src/gpu/mock/GrMockGpu.h"
Robert Phillips5edf5102020-08-10 16:30:36 -040028#include "src/gpu/ops/GrSmallPathAtlasMgr.h"
Robert Phillipse19babf2020-04-06 13:57:30 -040029#include "src/gpu/text/GrAtlasManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050030#include "src/gpu/text/GrStrikeCache.h"
Brian Salomonea1d39b2021-04-01 17:06:52 -040031#include "src/image/SkImage_GpuBase.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050032#ifdef SK_METAL
Jim Van Verth351c9b52020-11-12 15:21:11 -050033#include "include/gpu/mtl/GrMtlBackendContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050034#include "src/gpu/mtl/GrMtlTrampoline.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050035#endif
36#ifdef SK_VULKAN
Mike Kleinc0bd9f92019-04-23 12:05:21 -050037#include "src/gpu/vk/GrVkGpu.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050038#endif
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -050039#ifdef SK_DIRECT3D
40#include "src/gpu/d3d/GrD3DGpu.h"
41#endif
Stephen White985741a2019-07-18 11:43:45 -040042#ifdef SK_DAWN
Mike Klein52337de2019-07-25 09:00:52 -050043#include "src/gpu/dawn/GrDawnGpu.h"
Stephen White985741a2019-07-18 11:43:45 -040044#endif
Adlai Holler6d0745b2020-10-13 13:29:00 -040045#include <memory>
Robert Phillipsa3457b82018-03-08 11:30:12 -050046
Brian Salomon24069eb2020-06-24 10:19:52 -040047#if GR_TEST_UTILS
48# include "include/utils/SkRandom.h"
49# if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
50# include <sanitizer/lsan_interface.h>
51# endif
52#endif
53
Adlai Holler9555f292020-10-09 09:41:14 -040054#define ASSERT_SINGLE_OWNER GR_ASSERT_SINGLE_OWNER(this->singleOwner())
55
Robert Phillipse7a959d2021-03-11 14:44:42 -050056GrDirectContext::DirectContextID GrDirectContext::DirectContextID::Next() {
Robert Phillipsedff4672021-03-11 09:16:25 -050057 static std::atomic<uint32_t> nextID{1};
58 uint32_t id;
59 do {
60 id = nextID.fetch_add(1, std::memory_order_relaxed);
61 } while (id == SK_InvalidUniqueID);
62 return DirectContextID(id);
63}
64
// Constructs a direct context for `backend` and tags it with a fresh
// process-unique DirectContextID. NOTE(review): the `false` passed to the base
// class appears to distinguish this from a DDL/recording context — confirm the
// parameter's meaning against the INHERITED constructor's declaration.
GrDirectContext::GrDirectContext(GrBackendApi backend, const GrContextOptions& options)
        : INHERITED(GrContextThreadSafeProxyPriv::Make(backend, options), false)
        , fDirectContextID(DirectContextID::Next()) {
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050069
// Tears the context down in a strict order: flush pending work, wait for the
// GPU to finish, destroy the drawing manager, release cached resources, and
// only then drop the mapped-buffer manager. Reordering these steps is unsafe.
GrDirectContext::~GrDirectContext() {
    ASSERT_SINGLE_OWNER
    // this if-test protects against the case where the context is being destroyed
    // before having been fully created
    if (fGpu) {
        this->flushAndSubmit();
    }

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/false);

    this->destroyDrawingManager();

    // Ideally we could just let the ptr drop, but resource cache queries this ptr in releaseAll.
    if (fResourceCache) {
        fResourceCache->releaseAll();
    }
    // This has to be after GrResourceCache::releaseAll so that other threads that are holding
    // async pixel result don't try to destroy buffers off thread.
    fMappedBufferManager.reset();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050091
Adlai Holler61a591c2020-10-12 12:38:33 -040092sk_sp<GrContextThreadSafeProxy> GrDirectContext::threadSafeProxy() {
93 return INHERITED::threadSafeProxy();
94}
95
Adlai Hollera7a40442020-10-09 09:49:42 -040096void GrDirectContext::resetGLTextureBindings() {
97 if (this->abandoned() || this->backend() != GrBackendApi::kOpenGL) {
98 return;
99 }
100 fGpu->resetTextureBindings();
101}
102
// Notifies Skia that external code modified GPU state, so the backend must
// re-send it before the next draw. `state` is a bitmask of state categories —
// presumably GrBackendState flags; confirm against the public header.
void GrDirectContext::resetContext(uint32_t state) {
    ASSERT_SINGLE_OWNER
    fGpu->markContextDirty(state);
}
107
// Abandons the context: no further GPU work will be issued, and backend
// objects are NOT freed through the 3D API (contrast with
// releaseResourcesAndAbandonContext). The teardown order below is load-bearing.
void GrDirectContext::abandonContext() {
    // Idempotent: a second call is a no-op.
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(this->caps()->mustSyncGpuDuringAbandon());

    fStrikeCache->freeAll();

    fMappedBufferManager->abandon();

    fResourceProvider->abandon();

    // abandon first so destructors don't try to free the resources in the API.
    fResourceCache->abandonAll();

    fGpu->disconnect(GrGpu::DisconnectType::kAbandon);

    // Must be after GrResourceCache::abandonAll().
    fMappedBufferManager.reset();

    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500137
Adlai Hollera7a40442020-10-09 09:49:42 -0400138bool GrDirectContext::abandoned() {
139 if (INHERITED::abandoned()) {
140 return true;
141 }
142
143 if (fGpu && fGpu->isDeviceLost()) {
144 this->abandonContext();
145 return true;
146 }
147 return false;
148}
149
Adlai Holler61a591c2020-10-12 12:38:33 -0400150bool GrDirectContext::oomed() { return fGpu ? fGpu->checkAndResetOOMed() : false; }
151
// Like abandonContext(), but additionally releases all backend objects through
// the 3D API before disconnecting (DisconnectType::kCleanup vs kAbandon).
// Ordering below is load-bearing; see the inline comments.
void GrDirectContext::releaseResourcesAndAbandonContext() {
    // Idempotent: a second call is a no-op.
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/true);

    fResourceProvider->abandon();

    // Release all resources in the backend 3D API.
    fResourceCache->releaseAll();

    // Must be after GrResourceCache::releaseAll().
    fMappedBufferManager.reset();

    fGpu->disconnect(GrGpu::DisconnectType::kCleanup);
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillips6db27c22019-05-01 10:43:56 -0400176
// Flushes pending work and then frees every unlocked GPU resource the context
// holds (atlases, glyph caches, drawing-manager resources, the resource cache).
// The context itself remains usable afterwards.
void GrDirectContext::freeGpuResources() {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    // Flush first so in-flight work doesn't pin resources we are about to free.
    this->flushAndSubmit();
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();

    // TODO: the glyph cache doesn't hold any GpuResources so this call should not be needed here.
    // Some slack in the GrTextBlob's implementation requires it though. That could be fixed.
    fStrikeCache->freeAll();

    this->drawingManager()->freeGpuResources();

    fResourceCache->purgeAllUnlocked();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500198
// Second-phase initialization, run after the backend-specific GrGpu has been
// created. Returns false if the GPU object is missing or base-class init
// fails; on success all per-context subsystems (caches, providers, atlas
// manager) are wired up. Creation order matters: the resource cache must
// exist before the resource provider that wraps it.
bool GrDirectContext::init() {
    ASSERT_SINGLE_OWNER
    if (!fGpu) {
        return false;
    }

    // Share the GPU's caps/pipeline builder with the thread-safe proxy before
    // base-class init, which relies on the proxy being fully set up.
    fThreadSafeProxy->priv().init(fGpu->refCaps(), fGpu->refPipelineBuilder());
    if (!INHERITED::init()) {
        return false;
    }

    SkASSERT(this->getTextBlobCache());
    SkASSERT(this->threadSafeCache());

    fStrikeCache = std::make_unique<GrStrikeCache>();
    fResourceCache = std::make_unique<GrResourceCache>(this->singleOwner(),
                                                       this->directContextID(),
                                                       this->contextID());
    fResourceCache->setProxyProvider(this->proxyProvider());
    fResourceCache->setThreadSafeCache(this->threadSafeCache());
#if GR_TEST_UTILS
    // Tests may override the cache budget; -1 means "no override".
    if (this->options().fResourceCacheLimitOverride != -1) {
        this->setResourceCacheLimit(this->options().fResourceCacheLimitOverride);
    }
#endif
    fResourceProvider = std::make_unique<GrResourceProvider>(fGpu.get(), fResourceCache.get(),
                                                             this->singleOwner());
    fMappedBufferManager = std::make_unique<GrClientMappedBufferManager>(this->directContextID());

    fDidTestPMConversions = false;

    // DDL TODO: we need to think through how the task group & persistent cache
    // get passed on to/shared between all the DDLRecorders created with this context.
    if (this->options().fExecutor) {
        fTaskGroup = std::make_unique<SkTaskGroup>(*this->options().fExecutor);
    }

    fPersistentCache = this->options().fPersistentCache;
    fShaderErrorHandler = this->options().fShaderErrorHandler;
    if (!fShaderErrorHandler) {
        fShaderErrorHandler = GrShaderUtils::DefaultShaderErrorHandler();
    }

    GrDrawOpAtlas::AllowMultitexturing allowMultitexturing;
    if (GrContextOptions::Enable::kNo == this->options().fAllowMultipleGlyphCacheTextures ||
        // multitexturing supported only if range can represent the index + texcoords fully
        !(this->caps()->shaderCaps()->floatIs32Bits() ||
          this->caps()->shaderCaps()->integerSupport())) {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kNo;
    } else {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kYes;
    }

    GrProxyProvider* proxyProvider = this->priv().proxyProvider();

    fAtlasManager = std::make_unique<GrAtlasManager>(proxyProvider,
                                                     this->options().fGlyphCacheTextureMaximumBytes,
                                                     allowMultitexturing);
    // Register the atlas manager so it gets flush callbacks.
    this->priv().addOnFlushCallbackObject(fAtlasManager.get());

    return true;
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500261
Adlai Holler3a508e92020-10-12 13:58:01 -0400262void GrDirectContext::getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const {
263 ASSERT_SINGLE_OWNER
264
265 if (resourceCount) {
266 *resourceCount = fResourceCache->getBudgetedResourceCount();
267 }
268 if (resourceBytes) {
269 *resourceBytes = fResourceCache->getBudgetedResourceBytes();
270 }
271}
272
273size_t GrDirectContext::getResourceCachePurgeableBytes() const {
274 ASSERT_SINGLE_OWNER
275 return fResourceCache->getPurgeableBytes();
276}
277
278void GrDirectContext::getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const {
279 ASSERT_SINGLE_OWNER
280 if (maxResources) {
281 *maxResources = -1;
282 }
283 if (maxResourceBytes) {
284 *maxResourceBytes = this->getResourceCacheLimit();
285 }
286}
287
288size_t GrDirectContext::getResourceCacheLimit() const {
289 ASSERT_SINGLE_OWNER
290 return fResourceCache->getMaxResourceBytes();
291}
292
// Legacy two-argument form: the resource-count limit (`unused`) is ignored;
// only the byte budget is forwarded to setResourceCacheLimit().
void GrDirectContext::setResourceCacheLimits(int unused, size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    this->setResourceCacheLimit(maxResourceBytes);
}
297
// Sets the byte budget for GPU resources held by the cache.
void GrDirectContext::setResourceCacheLimit(size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    fResourceCache->setLimit(maxResourceBytes);
}
302
// Purges unlocked cached resources (optionally only scratch ones), trims the
// cache back under budget, drops stale text blobs, and lets the backend free
// any unlocked backend objects it is holding.
void GrDirectContext::purgeUnlockedResources(bool scratchResourcesOnly) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(scratchResourcesOnly);
    fResourceCache->purgeAsNeeded();

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();

    fGpu->releaseUnlockedBackendObjects();
}
319
// Periodic housekeeping: completes finished async work, services mapped-buffer
// callbacks, and evicts resources that have not been used within `msNotUsed`.
void GrDirectContext::performDeferredCleanup(std::chrono::milliseconds msNotUsed) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->checkAsyncWorkCompletion();
    fMappedBufferManager->process();
    // Anything last used before this instant is eligible for eviction.
    auto purgeTime = GrStdSteadyClock::now() - msNotUsed;

    fResourceCache->purgeAsNeeded();
    fResourceCache->purgeResourcesNotUsedSince(purgeTime);

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
340
341void GrDirectContext::purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources) {
342 ASSERT_SINGLE_OWNER
343
344 if (this->abandoned()) {
345 return;
346 }
347
348 fResourceCache->purgeUnlockedResources(bytesToPurge, preferScratchResources);
349}
350
Adlai Holler3acc69a2020-10-13 08:20:51 -0400351////////////////////////////////////////////////////////////////////////////////
352bool GrDirectContext::wait(int numSemaphores, const GrBackendSemaphore waitSemaphores[],
353 bool deleteSemaphoresAfterWait) {
354 if (!fGpu || fGpu->caps()->semaphoreSupport()) {
355 return false;
356 }
357 GrWrapOwnership ownership =
358 deleteSemaphoresAfterWait ? kAdopt_GrWrapOwnership : kBorrow_GrWrapOwnership;
359 for (int i = 0; i < numSemaphores; ++i) {
360 std::unique_ptr<GrSemaphore> sema = fResourceProvider->wrapBackendSemaphore(
361 waitSemaphores[i], GrResourceProvider::SemaphoreWrapType::kWillWait, ownership);
362 // If we failed to wrap the semaphore it means the client didn't give us a valid semaphore
363 // to begin with. Therefore, it is fine to not wait on it.
364 if (sema) {
365 fGpu->waitSemaphore(sema.get());
366 }
367 }
368 return true;
369}
Adlai Holler4aa4c602020-10-12 13:58:52 -0400370
// Lazily creates the small-path atlas manager and (re)initializes its atlas on
// every call. Returns null if the atlas cannot be initialized for the current
// caps; the manager itself is kept alive either way once created.
GrSmallPathAtlasMgr* GrDirectContext::onGetSmallPathAtlasMgr() {
    if (!fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr = std::make_unique<GrSmallPathAtlasMgr>();

        // Register for flush callbacks so the atlas participates in flushes.
        this->priv().addOnFlushCallbackObject(fSmallPathAtlasMgr.get());
    }

    if (!fSmallPathAtlasMgr->initAtlas(this->proxyProvider(), this->caps())) {
        return nullptr;
    }

    return fSmallPathAtlasMgr.get();
}
384
Adlai Holler3acc69a2020-10-13 08:20:51 -0400385////////////////////////////////////////////////////////////////////////////////
386
// Flushes all work to the backend. On an abandoned context no work is flushed,
// but the client's finished/submitted callbacks are still invoked (with
// success=false for the submitted proc) so they are never leaked.
GrSemaphoresSubmitted GrDirectContext::flush(const GrFlushInfo& info) {
    ASSERT_SINGLE_OWNER
    if (this->abandoned()) {
        if (info.fFinishedProc) {
            info.fFinishedProc(info.fFinishedContext);
        }
        if (info.fSubmittedProc) {
            info.fSubmittedProc(info.fSubmittedContext, false);
        }
        return GrSemaphoresSubmitted::kNo;
    }

    // Empty proxy span => flush everything the drawing manager has pending.
    return this->drawingManager()->flushSurfaces({}, SkSurface::BackendSurfaceAccess::kNoAccess,
                                                 info, nullptr);
}
402
403bool GrDirectContext::submit(bool syncCpu) {
404 ASSERT_SINGLE_OWNER
405 if (this->abandoned()) {
406 return false;
407 }
408
409 if (!fGpu) {
410 return false;
411 }
412
413 return fGpu->submitToGpu(syncCpu);
414}
415
416////////////////////////////////////////////////////////////////////////////////
417
418void GrDirectContext::checkAsyncWorkCompletion() {
419 if (fGpu) {
420 fGpu->checkFinishProcs();
421 }
422}
423
Greg Daniela89b4302021-01-29 10:48:40 -0500424void GrDirectContext::syncAllOutstandingGpuWork(bool shouldExecuteWhileAbandoned) {
425 if (fGpu && (!this->abandoned() || shouldExecuteWhileAbandoned)) {
426 fGpu->finishOutstandingGpuWork();
427 this->checkAsyncWorkCompletion();
428 }
429}
430
Adlai Holler3acc69a2020-10-13 08:20:51 -0400431////////////////////////////////////////////////////////////////////////////////
432
433void GrDirectContext::storeVkPipelineCacheData() {
434 if (fGpu) {
435 fGpu->storeVkPipelineCacheData();
436 }
437}
438
439////////////////////////////////////////////////////////////////////////////////
440
441bool GrDirectContext::supportsDistanceFieldText() const {
442 return this->caps()->shaderCaps()->supportsDistanceFieldText();
443}
444
445//////////////////////////////////////////////////////////////////////////////
446
// Reports resource-cache contents plus the text-blob cache's byte usage to the
// client-supplied SkTraceMemoryDump for memory-profiling tools.
void GrDirectContext::dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const {
    ASSERT_SINGLE_OWNER
    fResourceCache->dumpMemoryStatistics(traceMemoryDump);
    traceMemoryDump->dumpNumericValue("skia/gr_text_blob_cache", "size", "bytes",
                                      this->getTextBlobCache()->usedBytes());
}
453
// Creates an uninitialized backend texture with the given explicit backend
// format. Returns an invalid GrBackendTexture on an abandoned context or
// backend failure. The caller owns the result and must eventually call
// deleteBackendTexture().
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    return fGpu->createBackendTexture({width, height}, backendFormat, renderable,
                                      mipMapped, isProtected);
}
467
// Convenience overload: maps the SkColorType to this context's default backend
// format, then defers to the format-based overload. Returns an invalid texture
// on an abandoned context or if no suitable format exists.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    const GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);

    return this->createBackendTexture(width, height, format, mipMapped, renderable, isProtected);
}
481
Brian Salomon71283232021-04-08 12:45:58 -0400482static GrBackendTexture create_and_clear_backend_texture(GrDirectContext* dContext,
483 SkISize dimensions,
484 const GrBackendFormat& backendFormat,
485 GrMipmapped mipMapped,
486 GrRenderable renderable,
487 GrProtected isProtected,
488 sk_sp<GrRefCntedCallback> finishedCallback,
489 std::array<float, 4> color) {
Adlai Holler98dd0042020-10-13 10:04:00 -0400490 GrGpu* gpu = dContext->priv().getGpu();
Adlai Holler98dd0042020-10-13 10:04:00 -0400491 GrBackendTexture beTex = gpu->createBackendTexture(dimensions, backendFormat, renderable,
492 mipMapped, isProtected);
493 if (!beTex.isValid()) {
494 return {};
495 }
496
Brian Salomon71283232021-04-08 12:45:58 -0400497 if (!dContext->priv().getGpu()->clearBackendTexture(beTex,
498 std::move(finishedCallback),
499 color)) {
Adlai Holler98dd0042020-10-13 10:04:00 -0400500 dContext->deleteBackendTexture(beTex);
501 return {};
502 }
503 return beTex;
504}
505
// Helper: uploads `numLevels` pixmap mip levels into an existing backend
// texture by wrapping it in a borrowed proxy, writing the pixels through a
// GrSurfaceContext, and flushing. Returns false if the pixmap color type is
// incompatible with the texture's format, the wrap fails, or the write fails.
// The flush ensures the upload is recorded before the borrowed proxy drops.
static bool update_texture_with_pixmaps(GrDirectContext* context,
                                        const SkPixmap src[],
                                        int numLevels,
                                        const GrBackendTexture& backendTexture,
                                        GrSurfaceOrigin textureOrigin,
                                        sk_sp<GrRefCntedCallback> finishedCallback) {
    GrColorType ct = SkColorTypeToGrColorType(src[0].colorType());
    const GrBackendFormat& format = backendTexture.getBackendFormat();

    if (!context->priv().caps()->areColorTypeAndFormatCompatible(ct, format)) {
        return false;
    }

    // Borrowed wrap: the client keeps ownership of the backend texture; the
    // finished callback fires when the GPU is done reading the upload.
    auto proxy = context->priv().proxyProvider()->wrapBackendTexture(backendTexture,
                                                                     kBorrow_GrWrapOwnership,
                                                                     GrWrapCacheable::kNo,
                                                                     kRW_GrIOType,
                                                                     std::move(finishedCallback));
    if (!proxy) {
        return false;
    }

    GrSwizzle swizzle = context->priv().caps()->getReadSwizzle(format, ct);
    GrSurfaceProxyView view(std::move(proxy), textureOrigin, swizzle);
    GrSurfaceContext surfaceContext(context, std::move(view), src[0].info().colorInfo());
    // Repackage the SkPixmaps as GrCPixmaps for the write call.
    SkAutoSTArray<15, GrCPixmap> tmpSrc(numLevels);
    for (int i = 0; i < numLevels; ++i) {
        tmpSrc[i] = src[i];
    }
    if (!surfaceContext.writePixels(context, tmpSrc.get(), numLevels)) {
        return false;
    }

    GrSurfaceProxy* p = surfaceContext.asSurfaceProxy();
    GrFlushInfo info;
    context->priv().drawingManager()->flushSurfaces({&p, 1},
                                                    SkSurface::BackendSurfaceAccess::kNoAccess,
                                                    info,
                                                    nullptr);
    return true;
}
547
// Creates a backend texture with the given format, cleared to `color`.
// The finished callback is wrapped before the abandoned check — NOTE(review):
// presumably so the client's proc still fires (via the wrapper's destructor)
// even when we bail early; confirm against GrRefCntedCallback's contract.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return {};
    }

    return create_and_clear_backend_texture(this,
                                            {width, height},
                                            backendFormat,
                                            mipMapped,
                                            renderable,
                                            isProtected,
                                            std::move(finishedCallback),
                                            color.array());
}
572
// Creates a backend texture for the given SkColorType, cleared to `color`.
// The clear color is pre-swizzled with the format's write swizzle so the
// texture reads back as the requested color.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
    if (!format.isValid()) {
        return {};
    }

    GrColorType grColorType = SkColorTypeToGrColorType(skColorType);
    SkColor4f swizzledColor = this->caps()->getWriteSwizzle(format, grColorType).applyTo(color);

    return create_and_clear_backend_texture(this,
                                            {width, height},
                                            format,
                                            mipMapped,
                                            renderable,
                                            isProtected,
                                            std::move(finishedCallback),
                                            swizzledColor.array());
}
604
// Creates a backend texture initialized from the provided pixmap levels. The
// base level's dimensions and color type determine the texture; more than one
// level implies a mipmapped texture. On upload failure the freshly created
// texture is deleted so nothing leaks.
GrBackendTexture GrDirectContext::createBackendTexture(const SkPixmap srcData[],
                                                       int numProvidedLevels,
                                                       GrSurfaceOrigin textureOrigin,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    if (!srcData || numProvidedLevels <= 0) {
        return {};
    }

    SkColorType colorType = srcData[0].colorType();

    GrMipmapped mipMapped = GrMipmapped::kNo;
    if (numProvidedLevels > 1) {
        mipMapped = GrMipmapped::kYes;
    }

    GrBackendFormat backendFormat = this->defaultBackendFormat(colorType, renderable);
    GrBackendTexture beTex = this->createBackendTexture(srcData[0].width(),
                                                        srcData[0].height(),
                                                        backendFormat,
                                                        mipMapped,
                                                        renderable,
                                                        isProtected);
    if (!beTex.isValid()) {
        return {};
    }
    if (!update_texture_with_pixmaps(this,
                                     srcData,
                                     numProvidedLevels,
                                     beTex,
                                     textureOrigin,
                                     std::move(finishedCallback))) {
        // Upload failed: reclaim the texture we just created.
        this->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
652
// Clears an existing backend texture to `color`. Returns false on an
// abandoned context or backend failure; the finished callback is constructed
// up front so the client's proc is not leaked on the early-out path.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    return fGpu->clearBackendTexture(backendTexture, std::move(finishedCallback), color.array());
}
665
// Clears an existing backend texture to `color`, interpreting the texture via
// `skColorType`. The color is pre-swizzled with the format's write swizzle.
// Returns false if the color type and format are incompatible.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           SkColorType skColorType,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrBackendFormat format = backendTexture.getBackendFormat();
    GrColorType grColorType = SkColorTypeAndFormatToGrColorType(this->caps(), skColorType, format);

    if (!this->caps()->areColorTypeAndFormatCompatible(grColorType, format)) {
        return false;
    }

    GrSwizzle swizzle = this->caps()->getWriteSwizzle(format, grColorType);
    SkColor4f swizzledColor = swizzle.applyTo(color);

    return fGpu->clearBackendTexture(backendTexture,
                                     std::move(finishedCallback),
                                     swizzledColor.array());
}
691
// Re-uploads pixel data into an existing backend texture. For a mipmapped
// texture the caller must supply the complete mip chain (level count is
// validated against the texture's dimensions); partial updates are rejected.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkPixmap srcData[],
                                           int numLevels,
                                           GrSurfaceOrigin textureOrigin,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    if (!srcData || numLevels <= 0) {
        return false;
    }

    // If the texture has MIP levels then we require that the full set is overwritten.
    int numExpectedLevels = 1;
    if (backendTexture.hasMipmaps()) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(backendTexture.width(),
                                                        backendTexture.height()) + 1;
    }
    if (numLevels != numExpectedLevels) {
        return false;
    }
    return update_texture_with_pixmaps(this,
                                       srcData,
                                       numLevels,
                                       backendTexture,
                                       textureOrigin,
                                       std::move(finishedCallback));
}
724
Adlai Holler64e13832020-10-13 08:21:56 -0400725//////////////////////////////////////////////////////////////////////////////
726
727static GrBackendTexture create_and_update_compressed_backend_texture(
728 GrDirectContext* dContext,
729 SkISize dimensions,
730 const GrBackendFormat& backendFormat,
731 GrMipmapped mipMapped,
732 GrProtected isProtected,
733 sk_sp<GrRefCntedCallback> finishedCallback,
Brian Salomon71283232021-04-08 12:45:58 -0400734 const void* data,
735 size_t size) {
Adlai Holler64e13832020-10-13 08:21:56 -0400736 GrGpu* gpu = dContext->priv().getGpu();
737
738 GrBackendTexture beTex = gpu->createCompressedBackendTexture(dimensions, backendFormat,
739 mipMapped, isProtected);
740 if (!beTex.isValid()) {
741 return {};
742 }
743
744 if (!dContext->priv().getGpu()->updateCompressedBackendTexture(
Brian Salomon71283232021-04-08 12:45:58 -0400745 beTex, std::move(finishedCallback), data, size)) {
Adlai Holler64e13832020-10-13 08:21:56 -0400746 dContext->deleteBackendTexture(beTex);
747 return {};
748 }
749 return beTex;
750}
751
// Creates a compressed backend texture filled with `color`: the color is
// compressed on the CPU into a temporary buffer sized for the full (optionally
// mipmapped) payload, then uploaded. Returns an invalid texture if the format
// does not map to a known compression type or the context is abandoned.
GrBackendTexture GrDirectContext::createCompressedBackendTexture(
        int width, int height,
        const GrBackendFormat& backendFormat,
        const SkColor4f& color,
        GrMipmapped mipmapped,
        GrProtected isProtected,
        GrGpuFinishedProc finishedProc,
        GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    SkImage::CompressionType compression = GrBackendFormatToCompressionType(backendFormat);
    if (compression == SkImage::CompressionType::kNone) {
        return {};
    }

    size_t size = SkCompressedDataSize(compression,
                                       {width, height},
                                       nullptr,
                                       mipmapped == GrMipmapped::kYes);
    auto storage = std::make_unique<char[]>(size);
    GrFillInCompressedData(compression, {width, height}, mipmapped, storage.get(), color);
    return create_and_update_compressed_backend_texture(this,
                                                        {width, height},
                                                        backendFormat,
                                                        mipmapped,
                                                        isProtected,
                                                        std::move(finishedCallback),
                                                        storage.get(),
                                                        size);
}
787
Brian Salomon71283232021-04-08 12:45:58 -0400788GrBackendTexture GrDirectContext::createCompressedBackendTexture(
789 int width, int height,
790 SkImage::CompressionType compression,
791 const SkColor4f& color,
792 GrMipmapped mipMapped,
793 GrProtected isProtected,
794 GrGpuFinishedProc finishedProc,
795 GrGpuFinishedContext finishedContext) {
Adlai Holler64e13832020-10-13 08:21:56 -0400796 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
797 GrBackendFormat format = this->compressedBackendFormat(compression);
798 return this->createCompressedBackendTexture(width, height, format, color,
799 mipMapped, isProtected, finishedProc,
800 finishedContext);
801}
802
Brian Salomon71283232021-04-08 12:45:58 -0400803GrBackendTexture GrDirectContext::createCompressedBackendTexture(
804 int width, int height,
805 const GrBackendFormat& backendFormat,
806 const void* compressedData,
807 size_t dataSize,
808 GrMipmapped mipMapped,
809 GrProtected isProtected,
810 GrGpuFinishedProc finishedProc,
811 GrGpuFinishedContext finishedContext) {
Adlai Holler64e13832020-10-13 08:21:56 -0400812 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
Brian Salomon694ff172020-11-04 16:54:28 -0500813 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400814
815 if (this->abandoned()) {
816 return {};
817 }
818
Brian Salomon71283232021-04-08 12:45:58 -0400819 return create_and_update_compressed_backend_texture(this,
820 {width, height},
821 backendFormat,
822 mipMapped,
823 isProtected,
824 std::move(finishedCallback),
825 compressedData,
826 dataSize);
Adlai Holler64e13832020-10-13 08:21:56 -0400827}
828
Brian Salomon71283232021-04-08 12:45:58 -0400829GrBackendTexture GrDirectContext::createCompressedBackendTexture(
830 int width, int height,
831 SkImage::CompressionType compression,
832 const void* data, size_t dataSize,
833 GrMipmapped mipMapped,
834 GrProtected isProtected,
835 GrGpuFinishedProc finishedProc,
836 GrGpuFinishedContext finishedContext) {
Adlai Holler64e13832020-10-13 08:21:56 -0400837 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
838 GrBackendFormat format = this->compressedBackendFormat(compression);
839 return this->createCompressedBackendTexture(width, height, format, data, dataSize, mipMapped,
840 isProtected, finishedProc, finishedContext);
841}
842
843bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
844 const SkColor4f& color,
845 GrGpuFinishedProc finishedProc,
846 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500847 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400848
849 if (this->abandoned()) {
850 return false;
851 }
852
Brian Salomon71283232021-04-08 12:45:58 -0400853 SkImage::CompressionType compression =
854 GrBackendFormatToCompressionType(backendTexture.getBackendFormat());
855 if (compression == SkImage::CompressionType::kNone) {
856 return {};
857 }
858 size_t size = SkCompressedDataSize(compression,
859 backendTexture.dimensions(),
860 nullptr,
861 backendTexture.hasMipmaps());
862 SkAutoMalloc storage(size);
863 GrFillInCompressedData(compression,
864 backendTexture.dimensions(),
865 backendTexture.mipmapped(),
866 static_cast<char*>(storage.get()),
867 color);
868 return fGpu->updateCompressedBackendTexture(backendTexture,
869 std::move(finishedCallback),
870 storage.get(),
871 size);
Adlai Holler64e13832020-10-13 08:21:56 -0400872}
873
874bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
875 const void* compressedData,
876 size_t dataSize,
877 GrGpuFinishedProc finishedProc,
878 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500879 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400880
881 if (this->abandoned()) {
882 return false;
883 }
884
885 if (!compressedData) {
886 return false;
887 }
888
Brian Salomon71283232021-04-08 12:45:58 -0400889 return fGpu->updateCompressedBackendTexture(backendTexture,
890 std::move(finishedCallback),
891 compressedData,
892 dataSize);
Adlai Holler64e13832020-10-13 08:21:56 -0400893}
894
Adlai Holler6d0745b2020-10-13 13:29:00 -0400895//////////////////////////////////////////////////////////////////////////////
896
897bool GrDirectContext::setBackendTextureState(const GrBackendTexture& backendTexture,
898 const GrBackendSurfaceMutableState& state,
899 GrBackendSurfaceMutableState* previousState,
900 GrGpuFinishedProc finishedProc,
901 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500902 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400903
904 if (this->abandoned()) {
905 return false;
906 }
907
908 return fGpu->setBackendTextureState(backendTexture, state, previousState, std::move(callback));
909}
910
911
912bool GrDirectContext::setBackendRenderTargetState(const GrBackendRenderTarget& backendRenderTarget,
913 const GrBackendSurfaceMutableState& state,
914 GrBackendSurfaceMutableState* previousState,
915 GrGpuFinishedProc finishedProc,
916 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500917 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400918
919 if (this->abandoned()) {
920 return false;
921 }
922
923 return fGpu->setBackendRenderTargetState(backendRenderTarget, state, previousState,
924 std::move(callback));
925}
926
927void GrDirectContext::deleteBackendTexture(GrBackendTexture backendTex) {
928 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
929 // For the Vulkan backend we still must destroy the backend texture when the context is
930 // abandoned.
931 if ((this->abandoned() && this->backend() != GrBackendApi::kVulkan) || !backendTex.isValid()) {
932 return;
933 }
934
935 fGpu->deleteBackendTexture(backendTex);
936}
937
938//////////////////////////////////////////////////////////////////////////////
939
// Hands a pre-serialized shader (cache key plus data blob) to the backend GrGpu
// so it can be compiled ahead of first use. Returns the GrGpu's success result.
bool GrDirectContext::precompileShader(const SkData& key, const SkData& data) {
    return fGpu->precompileShader(key, data);
}
943
944#ifdef SK_ENABLE_DUMP_GPU
945#include "include/core/SkString.h"
946#include "src/utils/SkJSONWriter.h"
// Serializes the context's debug state (backend name, caps, GrGpu state, and
// the context's own JSON dump) into a single pretty-printed, NUL-terminated
// JSON string. Only compiled when SK_ENABLE_DUMP_GPU is defined.
SkString GrDirectContext::dump() const {
    SkDynamicMemoryWStream stream;
    SkJSONWriter writer(&stream, SkJSONWriter::Mode::kPretty);
    writer.beginObject();

    writer.appendString("backend", GrBackendApiToStr(this->backend()));

    writer.appendName("caps");
    this->caps()->dumpJSON(&writer);

    writer.appendName("gpu");
    this->fGpu->dumpJSON(&writer);

    writer.appendName("context");
    this->dumpJSON(&writer);

    // Flush JSON to the memory stream
    writer.endObject();
    writer.flush();

    // Null terminate the JSON data in the memory stream
    stream.write8(0);

    // Allocate a string big enough to hold all the data, then copy out of the stream
    SkString result(stream.bytesWritten());
    stream.copyToAndReset(result.writable_str());
    return result;
}
975#endif
976
John Rosascoa9b348f2019-11-08 13:18:15 -0800977#ifdef SK_GL
Robert Phillipsc7228c62020-07-14 12:57:39 -0400978
Robert Phillipsf4f80112020-07-13 16:13:31 -0400979/*************************************************************************************************/
980sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500981 GrContextOptions defaultOptions;
Jim Van Verth03b8ab22020-02-24 11:36:15 -0500982 return MakeGL(std::move(glInterface), defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -0500983}
984
// Creates a GL-backed context with the given options; passing a null interface
// defers interface selection to the full overload below.
sk_sp<GrDirectContext> GrDirectContext::MakeGL(const GrContextOptions& options) {
    return MakeGL(nullptr, options);
}
988
Robert Phillipsf4f80112020-07-13 16:13:31 -0400989sk_sp<GrDirectContext> GrDirectContext::MakeGL() {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400990 GrContextOptions defaultOptions;
991 return MakeGL(nullptr, defaultOptions);
992}
993
Brian Salomon24069eb2020-06-24 10:19:52 -0400994#if GR_TEST_UTILS
// Test-only helper: wraps a glGetError implementation so that roughly 1 in 300
// otherwise-successful calls instead reports GR_GL_OUT_OF_MEMORY, exercising
// the driver-OOM handling paths.
GrGLFunction<GrGLGetErrorFn> make_get_error_with_random_oom(GrGLFunction<GrGLGetErrorFn> original) {
    // A SkRandom and a GrGLFunction<GrGLGetErrorFn> are too big to be captured by a
    // GrGLFunction<GrGLGetError> (surprise, surprise). So we make a context object and
    // capture that by pointer. However, GrGLFunction doesn't support calling a destructor
    // on the thing it captures. So we leak the context.
    struct GetErrorContext {
        SkRandom fRandom;
        GrGLFunction<GrGLGetErrorFn> fGetError;
    };

    auto errorContext = new GetErrorContext;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
    // Silence LeakSanitizer for the intentionally-leaked context above.
    __lsan_ignore_object(errorContext);
#endif

    errorContext->fGetError = original;

    return GrGLFunction<GrGLGetErrorFn>([errorContext]() {
        GrGLenum error = errorContext->fGetError();
        // Only inject an OOM when the real call succeeded, so genuine errors
        // are never masked.
        if (error == GR_GL_NO_ERROR && (errorContext->fRandom.nextU() % 300) == 0) {
            error = GR_GL_OUT_OF_MEMORY;
        }
        return error;
    });
}
1021#endif
1022
// Full GL factory: builds the context shell, optionally wraps the interface
// for random-OOM testing, creates the GrGLGpu, and finishes initialization.
// Returns nullptr if init() fails.
sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface,
                                               const GrContextOptions& options) {
    sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kOpenGL, options));
#if GR_TEST_UTILS
    if (options.fRandomGLOOM) {
        // Clone the interface and swap in a GetError that randomly reports OOM.
        // NOTE(review): this dereferences glInterface — assumes callers setting
        // fRandomGLOOM always pass a non-null interface; confirm.
        auto copy = sk_make_sp<GrGLInterface>(*glInterface);
        copy->fFunctions.fGetError =
                make_get_error_with_random_oom(glInterface->fFunctions.fGetError);
#if GR_GL_CHECK_ERROR
        // Suppress logging GL errors since we'll be synthetically generating them.
        copy->suppressErrorLogging();
#endif
        glInterface = std::move(copy);
    }
#endif
    // The GrGpu holds a back-pointer to the context, so the shell is built first.
    direct->fGpu = GrGLGpu::Make(std::move(glInterface), options, direct.get());
    if (!direct->init()) {
        return nullptr;
    }
    return direct;
}
John Rosascoa9b348f2019-11-08 13:18:15 -08001044#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001045
Robert Phillipsf4f80112020-07-13 16:13:31 -04001046/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001047sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions) {
1048 GrContextOptions defaultOptions;
1049 return MakeMock(mockOptions, defaultOptions);
1050}
1051
1052sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions,
1053 const GrContextOptions& options) {
1054 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMock, options));
1055
1056 direct->fGpu = GrMockGpu::Make(mockOptions, options, direct.get());
1057 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001058 return nullptr;
1059 }
Chris Daltona378b452019-12-11 13:24:11 -05001060
Robert Phillipsf4f80112020-07-13 16:13:31 -04001061 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001062}
1063
Greg Danielb4d89562018-10-03 18:44:49 +00001064#ifdef SK_VULKAN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001065/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001066sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext) {
1067 GrContextOptions defaultOptions;
1068 return MakeVulkan(backendContext, defaultOptions);
1069}
1070
1071sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext,
1072 const GrContextOptions& options) {
1073 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kVulkan, options));
1074
1075 direct->fGpu = GrVkGpu::Make(backendContext, options, direct.get());
1076 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001077 return nullptr;
1078 }
1079
Robert Phillipsf4f80112020-07-13 16:13:31 -04001080 return direct;
Greg Danielb4d89562018-10-03 18:44:49 +00001081}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001082#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001083
1084#ifdef SK_METAL
Robert Phillipsf4f80112020-07-13 16:13:31 -04001085/*************************************************************************************************/
Jim Van Verth351c9b52020-11-12 15:21:11 -05001086sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001087 GrContextOptions defaultOptions;
Jim Van Verth351c9b52020-11-12 15:21:11 -05001088 return MakeMetal(backendContext, defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -05001089}
1090
Jim Van Verth351c9b52020-11-12 15:21:11 -05001091sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext,
1092 const GrContextOptions& options) {
Robert Phillipsf4f80112020-07-13 16:13:31 -04001093 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
Robert Phillipsa3457b82018-03-08 11:30:12 -05001094
Jim Van Verth351c9b52020-11-12 15:21:11 -05001095 direct->fGpu = GrMtlTrampoline::MakeGpu(backendContext, options, direct.get());
Robert Phillipsf4f80112020-07-13 16:13:31 -04001096 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001097 return nullptr;
1098 }
Timothy Liang4e85e802018-06-28 16:37:18 -04001099
Robert Phillipsf4f80112020-07-13 16:13:31 -04001100 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001101}
Jim Van Verth351c9b52020-11-12 15:21:11 -05001102
1103// deprecated
1104sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue) {
1105 GrContextOptions defaultOptions;
1106 return MakeMetal(device, queue, defaultOptions);
1107}
1108
1109// deprecated
1110// remove include/gpu/mtl/GrMtlBackendContext.h, above, when removed
1111sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue,
1112 const GrContextOptions& options) {
1113 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
1114 GrMtlBackendContext backendContext = {};
1115 backendContext.fDevice.reset(device);
1116 backendContext.fQueue.reset(queue);
1117
1118 return GrDirectContext::MakeMetal(backendContext, options);
1119}
Robert Phillipsa3457b82018-03-08 11:30:12 -05001120#endif
1121
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001122#ifdef SK_DIRECT3D
Robert Phillipsf4f80112020-07-13 16:13:31 -04001123/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001124sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext) {
1125 GrContextOptions defaultOptions;
1126 return MakeDirect3D(backendContext, defaultOptions);
1127}
1128
1129sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext,
1130 const GrContextOptions& options) {
1131 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDirect3D, options));
1132
1133 direct->fGpu = GrD3DGpu::Make(backendContext, options, direct.get());
1134 if (!direct->init()) {
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -05001135 return nullptr;
1136 }
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001137
Robert Phillipsf4f80112020-07-13 16:13:31 -04001138 return direct;
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001139}
1140#endif
1141
Stephen White985741a2019-07-18 11:43:45 -04001142#ifdef SK_DAWN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001143/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001144sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device) {
Stephen White985741a2019-07-18 11:43:45 -04001145 GrContextOptions defaultOptions;
1146 return MakeDawn(device, defaultOptions);
1147}
1148
Robert Phillipsf4f80112020-07-13 16:13:31 -04001149sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device,
1150 const GrContextOptions& options) {
1151 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDawn, options));
Stephen White985741a2019-07-18 11:43:45 -04001152
Robert Phillipsf4f80112020-07-13 16:13:31 -04001153 direct->fGpu = GrDawnGpu::Make(device, options, direct.get());
1154 if (!direct->init()) {
Stephen White985741a2019-07-18 11:43:45 -04001155 return nullptr;
1156 }
1157
Robert Phillipsf4f80112020-07-13 16:13:31 -04001158 return direct;
Stephen White985741a2019-07-18 11:43:45 -04001159}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001160
Stephen White985741a2019-07-18 11:43:45 -04001161#endif