blob: 36b5c52c1980452cee96837c76e1b4d313bc5be5 [file] [log] [blame]
Robert Phillipsa3457b82018-03-08 11:30:12 -05001/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Greg Daniel54bfb182018-11-20 17:12:36 -05008
Robert Phillipsb7bfbc22020-07-01 12:55:01 -04009#include "include/gpu/GrDirectContext.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050010
Adlai Holler3acc69a2020-10-13 08:20:51 -040011#include "include/core/SkTraceMemoryDump.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050012#include "include/gpu/GrContextThreadSafeProxy.h"
Brian Salomon71283232021-04-08 12:45:58 -040013#include "src/core/SkAutoMalloc.h"
Adlai Holler9555f292020-10-09 09:41:14 -040014#include "src/core/SkTaskGroup.h"
Brian Salomon71283232021-04-08 12:45:58 -040015#include "src/gpu/GrBackendUtils.h"
Adlai Holler9555f292020-10-09 09:41:14 -040016#include "src/gpu/GrClientMappedBufferManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050017#include "src/gpu/GrContextThreadSafeProxyPriv.h"
Adlai Hollera0693042020-10-14 11:23:11 -040018#include "src/gpu/GrDirectContextPriv.h"
Adlai Holler4aa4c602020-10-12 13:58:52 -040019#include "src/gpu/GrDrawingManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050020#include "src/gpu/GrGpu.h"
Adlai Holler9555f292020-10-09 09:41:14 -040021#include "src/gpu/GrResourceProvider.h"
22#include "src/gpu/GrShaderUtils.h"
Brian Salomonea1d39b2021-04-01 17:06:52 -040023#include "src/gpu/GrSurfaceContext.h"
Adlai Holler4aa4c602020-10-12 13:58:52 -040024#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050025#include "src/gpu/effects/GrSkSLFP.h"
26#include "src/gpu/gl/GrGLGpu.h"
27#include "src/gpu/mock/GrMockGpu.h"
Robert Phillips5edf5102020-08-10 16:30:36 -040028#include "src/gpu/ops/GrSmallPathAtlasMgr.h"
Robert Phillipse19babf2020-04-06 13:57:30 -040029#include "src/gpu/text/GrAtlasManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050030#include "src/gpu/text/GrStrikeCache.h"
Brian Salomonea1d39b2021-04-01 17:06:52 -040031#include "src/image/SkImage_GpuBase.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050032#ifdef SK_METAL
Jim Van Verth351c9b52020-11-12 15:21:11 -050033#include "include/gpu/mtl/GrMtlBackendContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050034#include "src/gpu/mtl/GrMtlTrampoline.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050035#endif
36#ifdef SK_VULKAN
Mike Kleinc0bd9f92019-04-23 12:05:21 -050037#include "src/gpu/vk/GrVkGpu.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050038#endif
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -050039#ifdef SK_DIRECT3D
40#include "src/gpu/d3d/GrD3DGpu.h"
41#endif
Stephen White985741a2019-07-18 11:43:45 -040042#ifdef SK_DAWN
Mike Klein52337de2019-07-25 09:00:52 -050043#include "src/gpu/dawn/GrDawnGpu.h"
Stephen White985741a2019-07-18 11:43:45 -040044#endif
Adlai Holler6d0745b2020-10-13 13:29:00 -040045#include <memory>
Robert Phillipsa3457b82018-03-08 11:30:12 -050046
Brian Salomon24069eb2020-06-24 10:19:52 -040047#if GR_TEST_UTILS
48# include "include/utils/SkRandom.h"
49# if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
50# include <sanitizer/lsan_interface.h>
51# endif
52#endif
53
Adlai Holler9555f292020-10-09 09:41:14 -040054#define ASSERT_SINGLE_OWNER GR_ASSERT_SINGLE_OWNER(this->singleOwner())
55
Robert Phillipse7a959d2021-03-11 14:44:42 -050056GrDirectContext::DirectContextID GrDirectContext::DirectContextID::Next() {
Robert Phillipsedff4672021-03-11 09:16:25 -050057 static std::atomic<uint32_t> nextID{1};
58 uint32_t id;
59 do {
60 id = nextID.fetch_add(1, std::memory_order_relaxed);
61 } while (id == SK_InvalidUniqueID);
62 return DirectContextID(id);
63}
64
// Constructs a direct context for the given backend API. The thread-safe proxy
// is created here; the heavyweight members (gpu, caches, providers) are set up
// later in init().
GrDirectContext::GrDirectContext(GrBackendApi backend, const GrContextOptions& options)
        : INHERITED(GrContextThreadSafeProxyPriv::Make(backend, options), false)
        , fDirectContextID(DirectContextID::Next()) {
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050069
// Tears down the context. The destruction order below is deliberate; do not
// reorder without understanding the cross-thread dependencies noted inline.
GrDirectContext::~GrDirectContext() {
    ASSERT_SINGLE_OWNER
    // this if-test protects against the case where the context is being destroyed
    // before having been fully created
    if (fGpu) {
        this->flushAndSubmit();
    }

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/false);

    this->destroyDrawingManager();

    // Ideally we could just let the ptr drop, but resource cache queries this ptr in releaseAll.
    if (fResourceCache) {
        fResourceCache->releaseAll();
    }
    // This has to be after GrResourceCache::releaseAll so that other threads that are holding
    // async pixel result don't try to destroy buffers off thread.
    fMappedBufferManager.reset();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050091
// Returns the context's thread-safe proxy; simply re-exposes the base-class
// accessor as part of the public GrDirectContext API.
sk_sp<GrContextThreadSafeProxy> GrDirectContext::threadSafeProxy() {
    return INHERITED::threadSafeProxy();
}
95
Adlai Hollera7a40442020-10-09 09:49:42 -040096void GrDirectContext::resetGLTextureBindings() {
97 if (this->abandoned() || this->backend() != GrBackendApi::kOpenGL) {
98 return;
99 }
100 fGpu->resetTextureBindings();
101}
102
// Marks portions of the GPU's cached 3D API state dirty so they are re-sent on
// the next use. 'state' is a bitfield of the state categories to invalidate.
void GrDirectContext::resetContext(uint32_t state) {
    ASSERT_SINGLE_OWNER
    fGpu->markContextDirty(state);
}
107
// Abandons the context: the backend 3D API is assumed to be unusable, so
// resources are dropped WITHOUT freeing them in the API. Idempotent.
// The ordering of the steps below matters; see the inline notes.
void GrDirectContext::abandonContext() {
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(this->caps()->mustSyncGpuDuringAbandon());

    fStrikeCache->freeAll();

    fMappedBufferManager->abandon();

    fResourceProvider->abandon();

    // abandon first so destructors don't try to free the resources in the API.
    fResourceCache->abandonAll();

    fGpu->disconnect(GrGpu::DisconnectType::kAbandon);

    // Must be after GrResourceCache::abandonAll().
    fMappedBufferManager.reset();

    // fSmallPathAtlasMgr is created lazily, so it may not exist yet.
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500137
Adlai Hollera7a40442020-10-09 09:49:42 -0400138bool GrDirectContext::abandoned() {
139 if (INHERITED::abandoned()) {
140 return true;
141 }
142
143 if (fGpu && fGpu->isDeviceLost()) {
144 this->abandonContext();
145 return true;
146 }
147 return false;
148}
149
Adlai Holler61a591c2020-10-12 12:38:33 -0400150bool GrDirectContext::oomed() { return fGpu ? fGpu->checkAndResetOOMed() : false; }
151
// Like abandonContext(), but the backend API is still usable, so resources ARE
// released in the 3D API before the context is abandoned. Idempotent.
void GrDirectContext::releaseResourcesAndAbandonContext() {
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/true);

    fResourceProvider->abandon();

    // Release all resources in the backend 3D API.
    fResourceCache->releaseAll();

    // Must be after GrResourceCache::releaseAll().
    fMappedBufferManager.reset();

    fGpu->disconnect(GrGpu::DisconnectType::kCleanup);
    // fSmallPathAtlasMgr is created lazily, so it may not exist yet.
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillips6db27c22019-05-01 10:43:56 -0400176
// Frees GPU-backed resources that are not currently in use, after flushing any
// pending work. Safe no-op on an abandoned context.
void GrDirectContext::freeGpuResources() {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    // Flush first so in-flight work doesn't pin resources we want to free.
    this->flushAndSubmit();
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();

    // TODO: the glyph cache doesn't hold any GpuResources so this call should not be needed here.
    // Some slack in the GrTextBlob's implementation requires it though. That could be fixed.
    fStrikeCache->freeAll();

    this->drawingManager()->freeGpuResources();

    fResourceCache->purgeUnlockedResources();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500198
// Second-phase initialization: wires up the caches, providers, task group and
// atlas manager once a GrGpu exists. Returns false if the gpu was never
// created or base-class init fails. Construction order below matters —
// several members depend on ones created earlier.
bool GrDirectContext::init() {
    ASSERT_SINGLE_OWNER
    if (!fGpu) {
        return false;
    }

    // The thread-safe proxy needs the gpu's caps/pipeline builder before the
    // base class initializes.
    fThreadSafeProxy->priv().init(fGpu->refCaps(), fGpu->refPipelineBuilder());
    if (!INHERITED::init()) {
        return false;
    }

    SkASSERT(this->getTextBlobCache());
    SkASSERT(this->threadSafeCache());

    fStrikeCache = std::make_unique<GrStrikeCache>();
    fResourceCache = std::make_unique<GrResourceCache>(this->singleOwner(),
                                                       this->directContextID(),
                                                       this->contextID());
    fResourceCache->setProxyProvider(this->proxyProvider());
    fResourceCache->setThreadSafeCache(this->threadSafeCache());
#if GR_TEST_UTILS
    // Tests may force a specific cache budget; -1 means "no override".
    if (this->options().fResourceCacheLimitOverride != -1) {
        this->setResourceCacheLimit(this->options().fResourceCacheLimitOverride);
    }
#endif
    fResourceProvider = std::make_unique<GrResourceProvider>(fGpu.get(), fResourceCache.get(),
                                                             this->singleOwner());
    fMappedBufferManager = std::make_unique<GrClientMappedBufferManager>(this->directContextID());

    fDidTestPMConversions = false;

    // DDL TODO: we need to think through how the task group & persistent cache
    // get passed on to/shared between all the DDLRecorders created with this context.
    if (this->options().fExecutor) {
        fTaskGroup = std::make_unique<SkTaskGroup>(*this->options().fExecutor);
    }

    fPersistentCache = this->options().fPersistentCache;
    fShaderErrorHandler = this->options().fShaderErrorHandler;
    if (!fShaderErrorHandler) {
        fShaderErrorHandler = GrShaderUtils::DefaultShaderErrorHandler();
    }

    GrDrawOpAtlas::AllowMultitexturing allowMultitexturing;
    if (GrContextOptions::Enable::kNo == this->options().fAllowMultipleGlyphCacheTextures ||
        // multitexturing supported only if range can represent the index + texcoords fully
        !(this->caps()->shaderCaps()->floatIs32Bits() ||
          this->caps()->shaderCaps()->integerSupport())) {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kNo;
    } else {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kYes;
    }

    GrProxyProvider* proxyProvider = this->priv().proxyProvider();

    fAtlasManager = std::make_unique<GrAtlasManager>(proxyProvider,
                                                     this->options().fGlyphCacheTextureMaximumBytes,
                                                     allowMultitexturing);
    // The atlas manager participates in flushes so glyph uploads happen first.
    this->priv().addOnFlushCallbackObject(fAtlasManager.get());

    return true;
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500261
Adlai Holler3a508e92020-10-12 13:58:01 -0400262void GrDirectContext::getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const {
263 ASSERT_SINGLE_OWNER
264
265 if (resourceCount) {
266 *resourceCount = fResourceCache->getBudgetedResourceCount();
267 }
268 if (resourceBytes) {
269 *resourceBytes = fResourceCache->getBudgetedResourceBytes();
270 }
271}
272
// Returns the number of bytes held by cache resources that could be purged
// right now (i.e. unlocked/unreferenced).
size_t GrDirectContext::getResourceCachePurgeableBytes() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getPurgeableBytes();
}
277
278void GrDirectContext::getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const {
279 ASSERT_SINGLE_OWNER
280 if (maxResources) {
281 *maxResources = -1;
282 }
283 if (maxResourceBytes) {
284 *maxResourceBytes = this->getResourceCacheLimit();
285 }
286}
287
// Returns the resource cache's byte budget.
size_t GrDirectContext::getResourceCacheLimit() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getMaxResourceBytes();
}
292
// Legacy two-argument setter: the resource-count limit ('unused') is ignored;
// only the byte budget is applied. Prefer setResourceCacheLimit().
void GrDirectContext::setResourceCacheLimits(int unused, size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    this->setResourceCacheLimit(maxResourceBytes);
}
297
// Sets the resource cache's byte budget.
void GrDirectContext::setResourceCacheLimit(size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    fResourceCache->setLimit(maxResourceBytes);
}
302
// Purges unlocked resources from the cache (optionally only scratch ones),
// then lets the gpu release any backend objects it can. No-op when abandoned.
void GrDirectContext::purgeUnlockedResources(bool scratchResourcesOnly) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(scratchResourcesOnly);
    fResourceCache->purgeAsNeeded();

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();

    fGpu->releaseUnlockedBackendObjects();
}
319
// Periodic housekeeping: completes finished async work, services mapped-buffer
// returns, and purges cache resources that have been idle for at least
// 'msNotUsed' (optionally scratch-only). No-op when abandoned.
void GrDirectContext::performDeferredCleanup(std::chrono::milliseconds msNotUsed,
                                             bool scratchResourcesOnly) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->checkAsyncWorkCompletion();
    fMappedBufferManager->process();
    // Anything last used before this instant is eligible for purging.
    auto purgeTime = GrStdSteadyClock::now() - msNotUsed;

    fResourceCache->purgeAsNeeded();
    fResourceCache->purgeResourcesNotUsedSince(purgeTime, scratchResourcesOnly);

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
341
342void GrDirectContext::purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources) {
343 ASSERT_SINGLE_OWNER
344
345 if (this->abandoned()) {
346 return;
347 }
348
349 fResourceCache->purgeUnlockedResources(bytesToPurge, preferScratchResources);
350}
351
Adlai Holler3acc69a2020-10-13 08:20:51 -0400352////////////////////////////////////////////////////////////////////////////////
// Makes the GPU wait on the given backend semaphores before executing
// subsequently submitted work. Returns false if the backend lacks semaphore
// support. When deleteSemaphoresAfterWait is true the semaphores are adopted
// (we own their cleanup); otherwise they are only borrowed.
bool GrDirectContext::wait(int numSemaphores, const GrBackendSemaphore waitSemaphores[],
                           bool deleteSemaphoresAfterWait) {
    if (!fGpu || !fGpu->caps()->semaphoreSupport()) {
        return false;
    }
    GrWrapOwnership ownership =
            deleteSemaphoresAfterWait ? kAdopt_GrWrapOwnership : kBorrow_GrWrapOwnership;
    for (int i = 0; i < numSemaphores; ++i) {
        std::unique_ptr<GrSemaphore> sema = fResourceProvider->wrapBackendSemaphore(
                waitSemaphores[i], GrResourceProvider::SemaphoreWrapType::kWillWait, ownership);
        // If we failed to wrap the semaphore it means the client didn't give us a valid semaphore
        // to begin with. Therefore, it is fine to not wait on it.
        if (sema) {
            fGpu->waitSemaphore(sema.get());
        }
    }
    return true;
}
Adlai Holler4aa4c602020-10-12 13:58:52 -0400371
Robert Phillips5edf5102020-08-10 16:30:36 -0400372GrSmallPathAtlasMgr* GrDirectContext::onGetSmallPathAtlasMgr() {
Robert Phillips079455c2020-08-11 15:18:46 -0400373 if (!fSmallPathAtlasMgr) {
374 fSmallPathAtlasMgr = std::make_unique<GrSmallPathAtlasMgr>();
375
376 this->priv().addOnFlushCallbackObject(fSmallPathAtlasMgr.get());
377 }
378
379 if (!fSmallPathAtlasMgr->initAtlas(this->proxyProvider(), this->caps())) {
380 return nullptr;
381 }
382
383 return fSmallPathAtlasMgr.get();
Robert Phillips5edf5102020-08-10 16:30:36 -0400384}
385
Adlai Holler3acc69a2020-10-13 08:20:51 -0400386////////////////////////////////////////////////////////////////////////////////
387
// Flushes all pending work to the GPU command stream (does not submit).
// On an abandoned context the finished/submitted callbacks are still invoked
// (submitted with success=false) so callers are not left hanging.
GrSemaphoresSubmitted GrDirectContext::flush(const GrFlushInfo& info) {
    ASSERT_SINGLE_OWNER
    if (this->abandoned()) {
        if (info.fFinishedProc) {
            info.fFinishedProc(info.fFinishedContext);
        }
        if (info.fSubmittedProc) {
            info.fSubmittedProc(info.fSubmittedContext, false);
        }
        return GrSemaphoresSubmitted::kNo;
    }

    // Empty proxy list == flush everything.
    return this->drawingManager()->flushSurfaces({}, SkSurface::BackendSurfaceAccess::kNoAccess,
                                                 info, nullptr);
}
403
404bool GrDirectContext::submit(bool syncCpu) {
405 ASSERT_SINGLE_OWNER
406 if (this->abandoned()) {
407 return false;
408 }
409
410 if (!fGpu) {
411 return false;
412 }
413
414 return fGpu->submitToGpu(syncCpu);
415}
416
417////////////////////////////////////////////////////////////////////////////////
418
// Polls the gpu for finished work and fires any pending finished-callbacks.
void GrDirectContext::checkAsyncWorkCompletion() {
    if (fGpu) {
        fGpu->checkFinishProcs();
    }
}
424
// Blocks until all outstanding GPU work is finished, then flushes finished-
// callbacks. Skipped on an abandoned context unless the caller explicitly
// opts in (some backends must still sync during abandonment).
void GrDirectContext::syncAllOutstandingGpuWork(bool shouldExecuteWhileAbandoned) {
    if (fGpu && (!this->abandoned() || shouldExecuteWhileAbandoned)) {
        fGpu->finishOutstandingGpuWork();
        this->checkAsyncWorkCompletion();
    }
}
431
Adlai Holler3acc69a2020-10-13 08:20:51 -0400432////////////////////////////////////////////////////////////////////////////////
433
// Asks the gpu to persist its Vulkan pipeline cache data (no-op for backends
// where this does not apply).
void GrDirectContext::storeVkPipelineCacheData() {
    if (fGpu) {
        fGpu->storeVkPipelineCacheData();
    }
}
439
440////////////////////////////////////////////////////////////////////////////////
441
// True when the shader caps allow distance-field text rendering.
bool GrDirectContext::supportsDistanceFieldText() const {
    return this->caps()->shaderCaps()->supportsDistanceFieldText();
}
445
446//////////////////////////////////////////////////////////////////////////////
447
// Dumps resource-cache and text-blob-cache memory usage into the provided
// SkTraceMemoryDump (e.g. for chrome://tracing memory infra).
void GrDirectContext::dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const {
    ASSERT_SINGLE_OWNER
    fResourceCache->dumpMemoryStatistics(traceMemoryDump);
    traceMemoryDump->dumpNumericValue("skia/gr_text_blob_cache", "size", "bytes",
                                      this->getTextBlobCache()->usedBytes());
}
454
// Creates an uninitialized backend texture with the given explicit backend
// format. Returns an invalid GrBackendTexture on failure or if abandoned.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    return fGpu->createBackendTexture({width, height}, backendFormat, renderable,
                                      mipMapped, isProtected);
}
468
469GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
470 SkColorType skColorType,
471 GrMipmapped mipMapped,
472 GrRenderable renderable,
473 GrProtected isProtected) {
474 if (this->abandoned()) {
475 return GrBackendTexture();
476 }
477
478 const GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
479
480 return this->createBackendTexture(width, height, format, mipMapped, renderable, isProtected);
481}
482
// Helper: creates a backend texture and clears it to 'color'. On clear failure
// the just-created texture is deleted so nothing leaks. Returns an invalid
// texture on any failure.
static GrBackendTexture create_and_clear_backend_texture(GrDirectContext* dContext,
                                                         SkISize dimensions,
                                                         const GrBackendFormat& backendFormat,
                                                         GrMipmapped mipMapped,
                                                         GrRenderable renderable,
                                                         GrProtected isProtected,
                                                         sk_sp<GrRefCntedCallback> finishedCallback,
                                                         std::array<float, 4> color) {
    GrGpu* gpu = dContext->priv().getGpu();
    GrBackendTexture beTex = gpu->createBackendTexture(dimensions, backendFormat, renderable,
                                                       mipMapped, isProtected);
    if (!beTex.isValid()) {
        return {};
    }

    if (!dContext->priv().getGpu()->clearBackendTexture(beTex,
                                                        std::move(finishedCallback),
                                                        color)) {
        // Clean up the texture we just made rather than leaking it.
        dContext->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
506
// Helper: uploads 'numLevels' pixmap mip levels into an existing backend
// texture via a borrowed proxy, then flushes so the writes are recorded before
// the caller submits. Returns false if the pixmap color type is incompatible
// with the texture's format or any step fails.
// NOTE(review): assumes src[0] describes the base level and src has exactly
// numLevels entries — callers validate the level count.
static bool update_texture_with_pixmaps(GrDirectContext* context,
                                        const SkPixmap src[],
                                        int numLevels,
                                        const GrBackendTexture& backendTexture,
                                        GrSurfaceOrigin textureOrigin,
                                        sk_sp<GrRefCntedCallback> finishedCallback) {
    GrColorType ct = SkColorTypeToGrColorType(src[0].colorType());
    const GrBackendFormat& format = backendTexture.getBackendFormat();

    if (!context->priv().caps()->areColorTypeAndFormatCompatible(ct, format)) {
        return false;
    }

    // Borrow (don't adopt) the client's texture; the finished callback fires
    // when the GPU is done reading the staging data.
    auto proxy = context->priv().proxyProvider()->wrapBackendTexture(backendTexture,
                                                                     kBorrow_GrWrapOwnership,
                                                                     GrWrapCacheable::kNo,
                                                                     kRW_GrIOType,
                                                                     std::move(finishedCallback));
    if (!proxy) {
        return false;
    }

    GrSwizzle swizzle = context->priv().caps()->getReadSwizzle(format, ct);
    GrSurfaceProxyView view(std::move(proxy), textureOrigin, swizzle);
    GrSurfaceContext surfaceContext(context, std::move(view), src[0].info().colorInfo());
    SkAutoSTArray<15, GrCPixmap> tmpSrc(numLevels);
    for (int i = 0; i < numLevels; ++i) {
        tmpSrc[i] = src[i];
    }
    if (!surfaceContext.writePixels(context, tmpSrc.get(), numLevels)) {
        return false;
    }

    GrSurfaceProxy* p = surfaceContext.asSurfaceProxy();
    GrFlushInfo info;
    // Flush so the upload is recorded; submission is left to the caller.
    context->priv().drawingManager()->flushSurfaces({&p, 1},
                                                    SkSurface::BackendSurfaceAccess::kNoAccess,
                                                    info,
                                                    nullptr);
    return true;
}
548
// Creates a backend texture with an explicit format and clears it to 'color'.
// 'finishedProc' is invoked once the GPU has consumed the upload/clear.
// Returns an invalid texture on failure or if the context is abandoned.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    // Wrap the callback first so it fires even on the early-out paths.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return {};
    }

    return create_and_clear_backend_texture(this,
                                            {width, height},
                                            backendFormat,
                                            mipMapped,
                                            renderable,
                                            isProtected,
                                            std::move(finishedCallback),
                                            color.array());
}
573
// Creates a backend texture for the given SkColorType and clears it to
// 'color', applying the backend's write swizzle so the stored channel order
// matches what reads of that color type expect. Returns an invalid texture on
// failure.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    // Wrap the callback first so it fires even on the early-out paths.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
    if (!format.isValid()) {
        return {};
    }

    GrColorType grColorType = SkColorTypeToGrColorType(skColorType);
    SkColor4f swizzledColor = this->caps()->getWriteSwizzle(format, grColorType).applyTo(color);

    return create_and_clear_backend_texture(this,
                                            {width, height},
                                            format,
                                            mipMapped,
                                            renderable,
                                            isProtected,
                                            std::move(finishedCallback),
                                            swizzledColor.array());
}
605
// Creates a backend texture initialized from the given pixmap mip chain.
// srcData[0] supplies the dimensions and color type; more than one provided
// level implies a mipmapped texture. On upload failure the created texture is
// deleted. Returns an invalid texture on any failure.
GrBackendTexture GrDirectContext::createBackendTexture(const SkPixmap srcData[],
                                                       int numProvidedLevels,
                                                       GrSurfaceOrigin textureOrigin,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    // Wrap the callback first so it fires even on the early-out paths.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    if (!srcData || numProvidedLevels <= 0) {
        return {};
    }

    SkColorType colorType = srcData[0].colorType();

    GrMipmapped mipMapped = GrMipmapped::kNo;
    if (numProvidedLevels > 1) {
        mipMapped = GrMipmapped::kYes;
    }

    GrBackendFormat backendFormat = this->defaultBackendFormat(colorType, renderable);
    GrBackendTexture beTex = this->createBackendTexture(srcData[0].width(),
                                                        srcData[0].height(),
                                                        backendFormat,
                                                        mipMapped,
                                                        renderable,
                                                        isProtected);
    if (!beTex.isValid()) {
        return {};
    }
    if (!update_texture_with_pixmaps(this,
                                     srcData,
                                     numProvidedLevels,
                                     beTex,
                                     textureOrigin,
                                     std::move(finishedCallback))) {
        // Don't leak the texture we just created.
        this->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
653
// Clears an existing backend texture to 'color' (no swizzle — the raw channel
// values are written as given). Returns false on failure or if abandoned.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    // Wrap the callback first so it fires even on the early-out paths.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    return fGpu->clearBackendTexture(backendTexture, std::move(finishedCallback), color.array());
}
666
// Clears an existing backend texture to 'color', interpreting the texture as
// holding 'skColorType' data and applying the matching write swizzle first.
// Returns false if the color type is incompatible with the texture's format.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           SkColorType skColorType,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    // Wrap the callback first so it fires even on the early-out paths.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrBackendFormat format = backendTexture.getBackendFormat();
    GrColorType grColorType = SkColorTypeAndFormatToGrColorType(this->caps(), skColorType, format);

    if (!this->caps()->areColorTypeAndFormatCompatible(grColorType, format)) {
        return false;
    }

    GrSwizzle swizzle = this->caps()->getWriteSwizzle(format, grColorType);
    SkColor4f swizzledColor = swizzle.applyTo(color);

    return fGpu->clearBackendTexture(backendTexture,
                                     std::move(finishedCallback),
                                     swizzledColor.array());
}
692
// Uploads pixmap data into an existing backend texture. If the texture is
// mipmapped, the caller must supply the complete mip chain (partial updates
// are rejected). Returns false on validation or upload failure.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkPixmap srcData[],
                                           int numLevels,
                                           GrSurfaceOrigin textureOrigin,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    // Wrap the callback first so it fires even on the early-out paths.
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    if (!srcData || numLevels <= 0) {
        return false;
    }

    // If the texture has MIP levels then we require that the full set is overwritten.
    int numExpectedLevels = 1;
    if (backendTexture.hasMipmaps()) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(backendTexture.width(),
                                                        backendTexture.height()) + 1;
    }
    if (numLevels != numExpectedLevels) {
        return false;
    }
    return update_texture_with_pixmaps(this,
                                       srcData,
                                       numLevels,
                                       backendTexture,
                                       textureOrigin,
                                       std::move(finishedCallback));
}
725
Adlai Holler64e13832020-10-13 08:21:56 -0400726//////////////////////////////////////////////////////////////////////////////
727
// Helper: creates a compressed backend texture and uploads 'size' bytes of
// pre-compressed data into it. On upload failure the texture is deleted so
// nothing leaks. Returns an invalid texture on any failure.
static GrBackendTexture create_and_update_compressed_backend_texture(
        GrDirectContext* dContext,
        SkISize dimensions,
        const GrBackendFormat& backendFormat,
        GrMipmapped mipMapped,
        GrProtected isProtected,
        sk_sp<GrRefCntedCallback> finishedCallback,
        const void* data,
        size_t size) {
    GrGpu* gpu = dContext->priv().getGpu();

    GrBackendTexture beTex = gpu->createCompressedBackendTexture(dimensions, backendFormat,
                                                                 mipMapped, isProtected);
    if (!beTex.isValid()) {
        return {};
    }

    if (!dContext->priv().getGpu()->updateCompressedBackendTexture(
                beTex, std::move(finishedCallback), data, size)) {
        // Clean up the texture we just made rather than leaking it.
        dContext->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
752
Brian Salomon71283232021-04-08 12:45:58 -0400753GrBackendTexture GrDirectContext::createCompressedBackendTexture(
754 int width, int height,
755 const GrBackendFormat& backendFormat,
756 const SkColor4f& color,
757 GrMipmapped mipmapped,
758 GrProtected isProtected,
759 GrGpuFinishedProc finishedProc,
760 GrGpuFinishedContext finishedContext) {
Adlai Holler64e13832020-10-13 08:21:56 -0400761 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
Brian Salomon694ff172020-11-04 16:54:28 -0500762 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400763
764 if (this->abandoned()) {
765 return {};
766 }
767
Brian Salomon71283232021-04-08 12:45:58 -0400768 SkImage::CompressionType compression = GrBackendFormatToCompressionType(backendFormat);
769 if (compression == SkImage::CompressionType::kNone) {
770 return {};
771 }
772
773 size_t size = SkCompressedDataSize(compression,
774 {width, height},
775 nullptr,
776 mipmapped == GrMipmapped::kYes);
777 auto storage = std::make_unique<char[]>(size);
778 GrFillInCompressedData(compression, {width, height}, mipmapped, storage.get(), color);
779 return create_and_update_compressed_backend_texture(this,
780 {width, height},
781 backendFormat,
782 mipmapped,
783 isProtected,
784 std::move(finishedCallback),
785 storage.get(),
786 size);
Adlai Holler64e13832020-10-13 08:21:56 -0400787}
788
Brian Salomon71283232021-04-08 12:45:58 -0400789GrBackendTexture GrDirectContext::createCompressedBackendTexture(
790 int width, int height,
791 SkImage::CompressionType compression,
792 const SkColor4f& color,
793 GrMipmapped mipMapped,
794 GrProtected isProtected,
795 GrGpuFinishedProc finishedProc,
796 GrGpuFinishedContext finishedContext) {
Adlai Holler64e13832020-10-13 08:21:56 -0400797 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
798 GrBackendFormat format = this->compressedBackendFormat(compression);
799 return this->createCompressedBackendTexture(width, height, format, color,
800 mipMapped, isProtected, finishedProc,
801 finishedContext);
802}
803
Brian Salomon71283232021-04-08 12:45:58 -0400804GrBackendTexture GrDirectContext::createCompressedBackendTexture(
805 int width, int height,
806 const GrBackendFormat& backendFormat,
807 const void* compressedData,
808 size_t dataSize,
809 GrMipmapped mipMapped,
810 GrProtected isProtected,
811 GrGpuFinishedProc finishedProc,
812 GrGpuFinishedContext finishedContext) {
Adlai Holler64e13832020-10-13 08:21:56 -0400813 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
Brian Salomon694ff172020-11-04 16:54:28 -0500814 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400815
816 if (this->abandoned()) {
817 return {};
818 }
819
Brian Salomon71283232021-04-08 12:45:58 -0400820 return create_and_update_compressed_backend_texture(this,
821 {width, height},
822 backendFormat,
823 mipMapped,
824 isProtected,
825 std::move(finishedCallback),
826 compressedData,
827 dataSize);
Adlai Holler64e13832020-10-13 08:21:56 -0400828}
829
Brian Salomon71283232021-04-08 12:45:58 -0400830GrBackendTexture GrDirectContext::createCompressedBackendTexture(
831 int width, int height,
832 SkImage::CompressionType compression,
833 const void* data, size_t dataSize,
834 GrMipmapped mipMapped,
835 GrProtected isProtected,
836 GrGpuFinishedProc finishedProc,
837 GrGpuFinishedContext finishedContext) {
Adlai Holler64e13832020-10-13 08:21:56 -0400838 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
839 GrBackendFormat format = this->compressedBackendFormat(compression);
840 return this->createCompressedBackendTexture(width, height, format, data, dataSize, mipMapped,
841 isProtected, finishedProc, finishedContext);
842}
843
844bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
845 const SkColor4f& color,
846 GrGpuFinishedProc finishedProc,
847 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500848 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400849
850 if (this->abandoned()) {
851 return false;
852 }
853
Brian Salomon71283232021-04-08 12:45:58 -0400854 SkImage::CompressionType compression =
855 GrBackendFormatToCompressionType(backendTexture.getBackendFormat());
856 if (compression == SkImage::CompressionType::kNone) {
857 return {};
858 }
859 size_t size = SkCompressedDataSize(compression,
860 backendTexture.dimensions(),
861 nullptr,
862 backendTexture.hasMipmaps());
863 SkAutoMalloc storage(size);
864 GrFillInCompressedData(compression,
865 backendTexture.dimensions(),
866 backendTexture.mipmapped(),
867 static_cast<char*>(storage.get()),
868 color);
869 return fGpu->updateCompressedBackendTexture(backendTexture,
870 std::move(finishedCallback),
871 storage.get(),
872 size);
Adlai Holler64e13832020-10-13 08:21:56 -0400873}
874
875bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
876 const void* compressedData,
877 size_t dataSize,
878 GrGpuFinishedProc finishedProc,
879 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500880 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400881
882 if (this->abandoned()) {
883 return false;
884 }
885
886 if (!compressedData) {
887 return false;
888 }
889
Brian Salomon71283232021-04-08 12:45:58 -0400890 return fGpu->updateCompressedBackendTexture(backendTexture,
891 std::move(finishedCallback),
892 compressedData,
893 dataSize);
Adlai Holler64e13832020-10-13 08:21:56 -0400894}
895
Adlai Holler6d0745b2020-10-13 13:29:00 -0400896//////////////////////////////////////////////////////////////////////////////
897
898bool GrDirectContext::setBackendTextureState(const GrBackendTexture& backendTexture,
899 const GrBackendSurfaceMutableState& state,
900 GrBackendSurfaceMutableState* previousState,
901 GrGpuFinishedProc finishedProc,
902 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500903 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400904
905 if (this->abandoned()) {
906 return false;
907 }
908
909 return fGpu->setBackendTextureState(backendTexture, state, previousState, std::move(callback));
910}
911
912
913bool GrDirectContext::setBackendRenderTargetState(const GrBackendRenderTarget& backendRenderTarget,
914 const GrBackendSurfaceMutableState& state,
915 GrBackendSurfaceMutableState* previousState,
916 GrGpuFinishedProc finishedProc,
917 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500918 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400919
920 if (this->abandoned()) {
921 return false;
922 }
923
924 return fGpu->setBackendRenderTargetState(backendRenderTarget, state, previousState,
925 std::move(callback));
926}
927
928void GrDirectContext::deleteBackendTexture(GrBackendTexture backendTex) {
929 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
930 // For the Vulkan backend we still must destroy the backend texture when the context is
931 // abandoned.
932 if ((this->abandoned() && this->backend() != GrBackendApi::kVulkan) || !backendTex.isValid()) {
933 return;
934 }
935
936 fGpu->deleteBackendTexture(backendTex);
937}
938
939//////////////////////////////////////////////////////////////////////////////
940
// Thin pass-through to GrGpu::precompileShader; returns its result.
// NOTE(review): presumably (key, data) are entries from a persistent shader cache —
// confirm against GrGpu::precompileShader's contract.
bool GrDirectContext::precompileShader(const SkData& key, const SkData& data) {
    return fGpu->precompileShader(key, data);
}
944
945#ifdef SK_ENABLE_DUMP_GPU
946#include "include/core/SkString.h"
947#include "src/utils/SkJSONWriter.h"
948SkString GrDirectContext::dump() const {
949 SkDynamicMemoryWStream stream;
950 SkJSONWriter writer(&stream, SkJSONWriter::Mode::kPretty);
951 writer.beginObject();
952
953 writer.appendString("backend", GrBackendApiToStr(this->backend()));
954
955 writer.appendName("caps");
956 this->caps()->dumpJSON(&writer);
957
958 writer.appendName("gpu");
959 this->fGpu->dumpJSON(&writer);
960
961 writer.appendName("context");
962 this->dumpJSON(&writer);
963
964 // Flush JSON to the memory stream
965 writer.endObject();
966 writer.flush();
967
968 // Null terminate the JSON data in the memory stream
969 stream.write8(0);
970
971 // Allocate a string big enough to hold all the data, then copy out of the stream
972 SkString result(stream.bytesWritten());
973 stream.copyToAndReset(result.writable_str());
974 return result;
975}
976#endif
977
John Rosascoa9b348f2019-11-08 13:18:15 -0800978#ifdef SK_GL
Robert Phillipsc7228c62020-07-14 12:57:39 -0400979
Robert Phillipsf4f80112020-07-13 16:13:31 -0400980/*************************************************************************************************/
981sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500982 GrContextOptions defaultOptions;
Jim Van Verth03b8ab22020-02-24 11:36:15 -0500983 return MakeGL(std::move(glInterface), defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -0500984}
985
Robert Phillipsf4f80112020-07-13 16:13:31 -0400986sk_sp<GrDirectContext> GrDirectContext::MakeGL(const GrContextOptions& options) {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400987 return MakeGL(nullptr, options);
988}
989
Robert Phillipsf4f80112020-07-13 16:13:31 -0400990sk_sp<GrDirectContext> GrDirectContext::MakeGL() {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400991 GrContextOptions defaultOptions;
992 return MakeGL(nullptr, defaultOptions);
993}
994
Brian Salomon24069eb2020-06-24 10:19:52 -0400995#if GR_TEST_UTILS
// Wraps a glGetError implementation so that, when the real call reports GR_GL_NO_ERROR,
// it occasionally (roughly 1 in 300 calls) reports GR_GL_OUT_OF_MEMORY instead. Used to
// exercise OOM handling paths under GR_TEST_UTILS (see the fRandomGLOOM option below).
GrGLFunction<GrGLGetErrorFn> make_get_error_with_random_oom(GrGLFunction<GrGLGetErrorFn> original) {
    // A SkRandom and a GrGLFunction<GrGLGetErrorFn> are too big to be captured by a
    // GrGLFunction<GrGLGetError> (surprise, surprise). So we make a context object and
    // capture that by pointer. However, GrGLFunction doesn't support calling a destructor
    // on the thing it captures. So we leak the context.
    struct GetErrorContext {
        SkRandom fRandom;
        GrGLFunction<GrGLGetErrorFn> fGetError;
    };

    auto errorContext = new GetErrorContext;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
    // The leak is deliberate (see above) — tell LeakSanitizer to ignore this object.
    __lsan_ignore_object(errorContext);
#endif

    errorContext->fGetError = original;

    return GrGLFunction<GrGLGetErrorFn>([errorContext]() {
        // Forward to the real implementation; only inject a synthetic OOM when the real
        // call succeeded, so genuine errors are never masked.
        GrGLenum error = errorContext->fGetError();
        if (error == GR_GL_NO_ERROR && (errorContext->fRandom.nextU() % 300) == 0) {
            error = GR_GL_OUT_OF_MEMORY;
        }
        return error;
    });
}
1022#endif
1023
Robert Phillipsf4f80112020-07-13 16:13:31 -04001024sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface,
1025 const GrContextOptions& options) {
1026 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kOpenGL, options));
Brian Salomon24069eb2020-06-24 10:19:52 -04001027#if GR_TEST_UTILS
1028 if (options.fRandomGLOOM) {
1029 auto copy = sk_make_sp<GrGLInterface>(*glInterface);
1030 copy->fFunctions.fGetError =
1031 make_get_error_with_random_oom(glInterface->fFunctions.fGetError);
1032#if GR_GL_CHECK_ERROR
1033 // Suppress logging GL errors since we'll be synthetically generating them.
1034 copy->suppressErrorLogging();
1035#endif
1036 glInterface = std::move(copy);
1037 }
1038#endif
Robert Phillipsf4f80112020-07-13 16:13:31 -04001039 direct->fGpu = GrGLGpu::Make(std::move(glInterface), options, direct.get());
1040 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001041 return nullptr;
1042 }
Robert Phillipsf4f80112020-07-13 16:13:31 -04001043 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001044}
John Rosascoa9b348f2019-11-08 13:18:15 -08001045#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001046
Robert Phillipsf4f80112020-07-13 16:13:31 -04001047/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001048sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions) {
1049 GrContextOptions defaultOptions;
1050 return MakeMock(mockOptions, defaultOptions);
1051}
1052
1053sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions,
1054 const GrContextOptions& options) {
1055 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMock, options));
1056
1057 direct->fGpu = GrMockGpu::Make(mockOptions, options, direct.get());
1058 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001059 return nullptr;
1060 }
Chris Daltona378b452019-12-11 13:24:11 -05001061
Robert Phillipsf4f80112020-07-13 16:13:31 -04001062 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001063}
1064
Greg Danielb4d89562018-10-03 18:44:49 +00001065#ifdef SK_VULKAN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001066/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001067sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext) {
1068 GrContextOptions defaultOptions;
1069 return MakeVulkan(backendContext, defaultOptions);
1070}
1071
1072sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext,
1073 const GrContextOptions& options) {
1074 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kVulkan, options));
1075
1076 direct->fGpu = GrVkGpu::Make(backendContext, options, direct.get());
1077 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001078 return nullptr;
1079 }
1080
Robert Phillipsf4f80112020-07-13 16:13:31 -04001081 return direct;
Greg Danielb4d89562018-10-03 18:44:49 +00001082}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001083#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001084
1085#ifdef SK_METAL
Robert Phillipsf4f80112020-07-13 16:13:31 -04001086/*************************************************************************************************/
Jim Van Verth351c9b52020-11-12 15:21:11 -05001087sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001088 GrContextOptions defaultOptions;
Jim Van Verth351c9b52020-11-12 15:21:11 -05001089 return MakeMetal(backendContext, defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -05001090}
1091
Jim Van Verth351c9b52020-11-12 15:21:11 -05001092sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext,
1093 const GrContextOptions& options) {
Robert Phillipsf4f80112020-07-13 16:13:31 -04001094 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
Robert Phillipsa3457b82018-03-08 11:30:12 -05001095
Jim Van Verth351c9b52020-11-12 15:21:11 -05001096 direct->fGpu = GrMtlTrampoline::MakeGpu(backendContext, options, direct.get());
Robert Phillipsf4f80112020-07-13 16:13:31 -04001097 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001098 return nullptr;
1099 }
Timothy Liang4e85e802018-06-28 16:37:18 -04001100
Robert Phillipsf4f80112020-07-13 16:13:31 -04001101 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001102}
Jim Van Verth351c9b52020-11-12 15:21:11 -05001103
1104// deprecated
1105sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue) {
1106 GrContextOptions defaultOptions;
1107 return MakeMetal(device, queue, defaultOptions);
1108}
1109
1110// deprecated
1111// remove include/gpu/mtl/GrMtlBackendContext.h, above, when removed
1112sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue,
1113 const GrContextOptions& options) {
1114 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
1115 GrMtlBackendContext backendContext = {};
1116 backendContext.fDevice.reset(device);
1117 backendContext.fQueue.reset(queue);
1118
1119 return GrDirectContext::MakeMetal(backendContext, options);
1120}
Robert Phillipsa3457b82018-03-08 11:30:12 -05001121#endif
1122
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001123#ifdef SK_DIRECT3D
Robert Phillipsf4f80112020-07-13 16:13:31 -04001124/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001125sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext) {
1126 GrContextOptions defaultOptions;
1127 return MakeDirect3D(backendContext, defaultOptions);
1128}
1129
1130sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext,
1131 const GrContextOptions& options) {
1132 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDirect3D, options));
1133
1134 direct->fGpu = GrD3DGpu::Make(backendContext, options, direct.get());
1135 if (!direct->init()) {
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -05001136 return nullptr;
1137 }
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001138
Robert Phillipsf4f80112020-07-13 16:13:31 -04001139 return direct;
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001140}
1141#endif
1142
Stephen White985741a2019-07-18 11:43:45 -04001143#ifdef SK_DAWN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001144/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001145sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device) {
Stephen White985741a2019-07-18 11:43:45 -04001146 GrContextOptions defaultOptions;
1147 return MakeDawn(device, defaultOptions);
1148}
1149
Robert Phillipsf4f80112020-07-13 16:13:31 -04001150sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device,
1151 const GrContextOptions& options) {
1152 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDawn, options));
Stephen White985741a2019-07-18 11:43:45 -04001153
Robert Phillipsf4f80112020-07-13 16:13:31 -04001154 direct->fGpu = GrDawnGpu::Make(device, options, direct.get());
1155 if (!direct->init()) {
Stephen White985741a2019-07-18 11:43:45 -04001156 return nullptr;
1157 }
1158
Robert Phillipsf4f80112020-07-13 16:13:31 -04001159 return direct;
Stephen White985741a2019-07-18 11:43:45 -04001160}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001161
Stephen White985741a2019-07-18 11:43:45 -04001162#endif