blob: 80eca7c703951f0c591288d89040c6eb82e31e09 [file] [log] [blame]
/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
Greg Daniel54bfb182018-11-20 17:12:36 -05008
Robert Phillipsb7bfbc22020-07-01 12:55:01 -04009#include "include/gpu/GrDirectContext.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050010
Adlai Holler3acc69a2020-10-13 08:20:51 -040011#include "include/core/SkTraceMemoryDump.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050012#include "include/gpu/GrContextThreadSafeProxy.h"
Brian Salomon71283232021-04-08 12:45:58 -040013#include "src/core/SkAutoMalloc.h"
Adlai Holler9555f292020-10-09 09:41:14 -040014#include "src/core/SkTaskGroup.h"
Brian Salomon71283232021-04-08 12:45:58 -040015#include "src/gpu/GrBackendUtils.h"
Adlai Holler9555f292020-10-09 09:41:14 -040016#include "src/gpu/GrClientMappedBufferManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050017#include "src/gpu/GrContextThreadSafeProxyPriv.h"
Adlai Hollera0693042020-10-14 11:23:11 -040018#include "src/gpu/GrDirectContextPriv.h"
Adlai Holler4aa4c602020-10-12 13:58:52 -040019#include "src/gpu/GrDrawingManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050020#include "src/gpu/GrGpu.h"
Adlai Holler9555f292020-10-09 09:41:14 -040021#include "src/gpu/GrResourceProvider.h"
22#include "src/gpu/GrShaderUtils.h"
Robert Phillips53eaa642021-08-10 13:49:51 -040023#include "src/gpu/SurfaceContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050024#include "src/gpu/effects/GrSkSLFP.h"
25#include "src/gpu/gl/GrGLGpu.h"
26#include "src/gpu/mock/GrMockGpu.h"
Robert Phillips5edf5102020-08-10 16:30:36 -040027#include "src/gpu/ops/GrSmallPathAtlasMgr.h"
Robert Phillipse19babf2020-04-06 13:57:30 -040028#include "src/gpu/text/GrAtlasManager.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050029#include "src/gpu/text/GrStrikeCache.h"
Brian Salomonea1d39b2021-04-01 17:06:52 -040030#include "src/image/SkImage_GpuBase.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050031#ifdef SK_METAL
Jim Van Verth351c9b52020-11-12 15:21:11 -050032#include "include/gpu/mtl/GrMtlBackendContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050033#include "src/gpu/mtl/GrMtlTrampoline.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050034#endif
35#ifdef SK_VULKAN
Mike Kleinc0bd9f92019-04-23 12:05:21 -050036#include "src/gpu/vk/GrVkGpu.h"
Robert Phillipsa3457b82018-03-08 11:30:12 -050037#endif
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -050038#ifdef SK_DIRECT3D
39#include "src/gpu/d3d/GrD3DGpu.h"
40#endif
Stephen White985741a2019-07-18 11:43:45 -040041#ifdef SK_DAWN
Mike Klein52337de2019-07-25 09:00:52 -050042#include "src/gpu/dawn/GrDawnGpu.h"
Stephen White985741a2019-07-18 11:43:45 -040043#endif
Adlai Holler6d0745b2020-10-13 13:29:00 -040044#include <memory>
Robert Phillipsa3457b82018-03-08 11:30:12 -050045
Brian Salomon24069eb2020-06-24 10:19:52 -040046#if GR_TEST_UTILS
47# include "include/utils/SkRandom.h"
48# if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
49# include <sanitizer/lsan_interface.h>
50# endif
51#endif
52
Adlai Holler9555f292020-10-09 09:41:14 -040053#define ASSERT_SINGLE_OWNER GR_ASSERT_SINGLE_OWNER(this->singleOwner())
54
Robert Phillipse7a959d2021-03-11 14:44:42 -050055GrDirectContext::DirectContextID GrDirectContext::DirectContextID::Next() {
Robert Phillipsedff4672021-03-11 09:16:25 -050056 static std::atomic<uint32_t> nextID{1};
57 uint32_t id;
58 do {
59 id = nextID.fetch_add(1, std::memory_order_relaxed);
60 } while (id == SK_InvalidUniqueID);
61 return DirectContextID(id);
62}
63
// Constructs a direct context for the given backend API. Only the lightweight
// base state and the unique context ID are set up here; the gpu, caches, and
// providers are created later in init() (see below).
GrDirectContext::GrDirectContext(GrBackendApi backend, const GrContextOptions& options)
        : INHERITED(GrContextThreadSafeProxyPriv::Make(backend, options), false)
        , fDirectContextID(DirectContextID::Next()) {
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050068
// Tears the context down in a strict order: flush pending work, wait for the
// GPU to finish, destroy the drawing manager, release cached resources, and
// only then drop the mapped-buffer manager.
GrDirectContext::~GrDirectContext() {
    ASSERT_SINGLE_OWNER
    // this if-test protects against the case where the context is being destroyed
    // before having been fully created
    if (fGpu) {
        this->flushAndSubmit();
    }

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/false);

    this->destroyDrawingManager();

    // Ideally we could just let the ptr drop, but resource cache queries this ptr in releaseAll.
    if (fResourceCache) {
        fResourceCache->releaseAll();
    }
    // This has to be after GrResourceCache::releaseAll so that other threads that are holding
    // async pixel result don't try to destroy buffers off thread.
    fMappedBufferManager.reset();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -050090
// Public re-export of the base class's thread-safe proxy accessor.
sk_sp<GrContextThreadSafeProxy> GrDirectContext::threadSafeProxy() {
    return INHERITED::threadSafeProxy();
}
94
Adlai Hollera7a40442020-10-09 09:49:42 -040095void GrDirectContext::resetGLTextureBindings() {
96 if (this->abandoned() || this->backend() != GrBackendApi::kOpenGL) {
97 return;
98 }
99 fGpu->resetTextureBindings();
100}
101
// Marks portions of the gpu's cached 3D-API state dirty so it is re-sent on
// next use; 'state' is a bitfield of the state categories to invalidate.
void GrDirectContext::resetContext(uint32_t state) {
    ASSERT_SINGLE_OWNER
    fGpu->markContextDirty(state);
}
106
// Abandons the context: the backend 3D API objects are NOT freed (kAbandon
// disconnect); everything is simply dropped. Idempotent — returns immediately
// if already abandoned. The teardown order below is deliberate.
void GrDirectContext::abandonContext() {
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(this->caps()->mustSyncGpuDuringAbandon());

    fStrikeCache->freeAll();

    fMappedBufferManager->abandon();

    fResourceProvider->abandon();

    // abandon first so destructors don't try to free the resources in the API.
    fResourceCache->abandonAll();

    fGpu->disconnect(GrGpu::DisconnectType::kAbandon);

    // Must be after GrResourceCache::abandonAll().
    fMappedBufferManager.reset();

    // fSmallPathAtlasMgr is created lazily (see onGetSmallPathAtlasMgr), so may be null.
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500136
// Reports whether the context is abandoned. NOTE: this is not a pure query —
// if the gpu reports a lost device, this call abandons the context as a side
// effect before returning true.
bool GrDirectContext::abandoned() {
    if (INHERITED::abandoned()) {
        return true;
    }

    if (fGpu && fGpu->isDeviceLost()) {
        this->abandonContext();
        return true;
    }
    return false;
}
148
Adlai Holler61a591c2020-10-12 12:38:33 -0400149bool GrDirectContext::oomed() { return fGpu ? fGpu->checkAndResetOOMed() : false; }
150
// Like abandonContext(), but first releases all resources back to the backend
// 3D API (kCleanup disconnect). Idempotent if already abandoned.
void GrDirectContext::releaseResourcesAndAbandonContext() {
    if (INHERITED::abandoned()) {
        return;
    }

    INHERITED::abandonContext();

    // We need to make sure all work is finished on the gpu before we start releasing resources.
    this->syncAllOutstandingGpuWork(/*shouldExecuteWhileAbandoned=*/true);

    fResourceProvider->abandon();

    // Release all resources in the backend 3D API.
    fResourceCache->releaseAll();

    // Must be after GrResourceCache::releaseAll().
    fMappedBufferManager.reset();

    fGpu->disconnect(GrGpu::DisconnectType::kCleanup);
    // fSmallPathAtlasMgr is created lazily, so may be null.
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();
}
Robert Phillips6db27c22019-05-01 10:43:56 -0400175
// Flushes pending work and then frees all unlocked GPU resources held by the
// context's caches and atlases. No-op if abandoned.
void GrDirectContext::freeGpuResources() {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->flushAndSubmit();
    // fSmallPathAtlasMgr is created lazily, so may be null.
    if (fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr->reset();
    }
    fAtlasManager->freeAll();

    // TODO: the glyph cache doesn't hold any GpuResources so this call should not be needed here.
    // Some slack in the GrTextBlob's implementation requires it though. That could be fixed.
    fStrikeCache->freeAll();

    this->drawingManager()->freeGpuResources();

    fResourceCache->purgeUnlockedResources();
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500197
// Second-stage construction: wires the gpu's caps into the thread-safe proxy,
// runs base-class init, then builds the caches/providers/managers in
// dependency order. Returns false if there is no gpu or base init fails.
bool GrDirectContext::init() {
    ASSERT_SINGLE_OWNER
    if (!fGpu) {
        return false;
    }

    fThreadSafeProxy->priv().init(fGpu->refCaps(), fGpu->refPipelineBuilder());
    if (!INHERITED::init()) {
        return false;
    }

    SkASSERT(this->getTextBlobCache());
    SkASSERT(this->threadSafeCache());

    fStrikeCache = std::make_unique<GrStrikeCache>();
    fResourceCache = std::make_unique<GrResourceCache>(this->singleOwner(),
                                                       this->directContextID(),
                                                       this->contextID());
    fResourceCache->setProxyProvider(this->proxyProvider());
    fResourceCache->setThreadSafeCache(this->threadSafeCache());
#if GR_TEST_UTILS
    // Test-only hook: -1 means "no override".
    if (this->options().fResourceCacheLimitOverride != -1) {
        this->setResourceCacheLimit(this->options().fResourceCacheLimitOverride);
    }
#endif
    fResourceProvider = std::make_unique<GrResourceProvider>(fGpu.get(), fResourceCache.get(),
                                                             this->singleOwner());
    fMappedBufferManager = std::make_unique<GrClientMappedBufferManager>(this->directContextID());

    fDidTestPMConversions = false;

    // DDL TODO: we need to think through how the task group & persistent cache
    // get passed on to/shared between all the DDLRecorders created with this context.
    if (this->options().fExecutor) {
        fTaskGroup = std::make_unique<SkTaskGroup>(*this->options().fExecutor);
    }

    fPersistentCache = this->options().fPersistentCache;

    GrDrawOpAtlas::AllowMultitexturing allowMultitexturing;
    if (GrContextOptions::Enable::kNo == this->options().fAllowMultipleGlyphCacheTextures ||
        // multitexturing supported only if range can represent the index + texcoords fully
        !(this->caps()->shaderCaps()->floatIs32Bits() ||
          this->caps()->shaderCaps()->integerSupport())) {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kNo;
    } else {
        allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kYes;
    }

    GrProxyProvider* proxyProvider = this->priv().proxyProvider();

    fAtlasManager = std::make_unique<GrAtlasManager>(proxyProvider,
                                                     this->options().fGlyphCacheTextureMaximumBytes,
                                                     allowMultitexturing);
    this->priv().addOnFlushCallbackObject(fAtlasManager.get());

    return true;
}
Robert Phillipsa3457b82018-03-08 11:30:12 -0500256
Adlai Holler3a508e92020-10-12 13:58:01 -0400257void GrDirectContext::getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const {
258 ASSERT_SINGLE_OWNER
259
260 if (resourceCount) {
261 *resourceCount = fResourceCache->getBudgetedResourceCount();
262 }
263 if (resourceBytes) {
264 *resourceBytes = fResourceCache->getBudgetedResourceBytes();
265 }
266}
267
// Returns the number of bytes held by resources that could be purged right now.
size_t GrDirectContext::getResourceCachePurgeableBytes() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getPurgeableBytes();
}
272
273void GrDirectContext::getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const {
274 ASSERT_SINGLE_OWNER
275 if (maxResources) {
276 *maxResources = -1;
277 }
278 if (maxResourceBytes) {
279 *maxResourceBytes = this->getResourceCacheLimit();
280 }
281}
282
// Returns the resource cache's byte budget.
size_t GrDirectContext::getResourceCacheLimit() const {
    ASSERT_SINGLE_OWNER
    return fResourceCache->getMaxResourceBytes();
}
287
// Legacy setter: the resource-count limit ('unused') is ignored; only the
// byte limit is applied.
void GrDirectContext::setResourceCacheLimits(int unused, size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    this->setResourceCacheLimit(maxResourceBytes);
}
292
// Sets the resource cache's byte budget.
void GrDirectContext::setResourceCacheLimit(size_t maxResourceBytes) {
    ASSERT_SINGLE_OWNER
    fResourceCache->setLimit(maxResourceBytes);
}
297
// Purges unlocked resources from the cache (optionally only scratch ones),
// then lets the cache and the gpu release what they can. No-op if abandoned.
void GrDirectContext::purgeUnlockedResources(bool scratchResourcesOnly) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(scratchResourcesOnly);
    fResourceCache->purgeAsNeeded();

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();

    fGpu->releaseUnlockedBackendObjects();
}
314
// Periodic housekeeping: completes finished async work, recycles client-mapped
// buffers, and purges resources that have not been used within 'msNotUsed'
// (optionally scratch-only). No-op if abandoned.
void GrDirectContext::performDeferredCleanup(std::chrono::milliseconds msNotUsed,
                                             bool scratchResourcesOnly) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    this->checkAsyncWorkCompletion();
    fMappedBufferManager->process();
    // Anything last used before this instant is eligible for purging.
    auto purgeTime = GrStdSteadyClock::now() - msNotUsed;

    fResourceCache->purgeAsNeeded();
    fResourceCache->purgeResourcesNotUsedSince(purgeTime, scratchResourcesOnly);

    // The textBlob Cache doesn't actually hold any GPU resource but this is a convenient
    // place to purge stale blobs
    this->getTextBlobCache()->purgeStaleBlobs();
}
336
// Purges unlocked resources until roughly 'bytesToPurge' bytes are freed,
// preferring scratch resources when requested. No-op if abandoned.
void GrDirectContext::purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources) {
    ASSERT_SINGLE_OWNER

    if (this->abandoned()) {
        return;
    }

    fResourceCache->purgeUnlockedResources(bytesToPurge, preferScratchResources);
}
346
Adlai Holler3acc69a2020-10-13 08:20:51 -0400347////////////////////////////////////////////////////////////////////////////////
// Makes the gpu wait on the given backend semaphores before executing future
// work. Returns false if semaphores are unsupported. Invalid client semaphores
// are skipped rather than failing the whole call. When
// deleteSemaphoresAfterWait is true the semaphores are adopted (we own and
// delete them); otherwise they are merely borrowed.
bool GrDirectContext::wait(int numSemaphores, const GrBackendSemaphore waitSemaphores[],
                           bool deleteSemaphoresAfterWait) {
    if (!fGpu || !fGpu->caps()->semaphoreSupport()) {
        return false;
    }
    GrWrapOwnership ownership =
            deleteSemaphoresAfterWait ? kAdopt_GrWrapOwnership : kBorrow_GrWrapOwnership;
    for (int i = 0; i < numSemaphores; ++i) {
        std::unique_ptr<GrSemaphore> sema = fResourceProvider->wrapBackendSemaphore(
                waitSemaphores[i], GrSemaphoreWrapType::kWillWait, ownership);
        // If we failed to wrap the semaphore it means the client didn't give us a valid semaphore
        // to begin with. Therefore, it is fine to not wait on it.
        if (sema) {
            fGpu->waitSemaphore(sema.get());
        }
    }
    return true;
}
Adlai Holler4aa4c602020-10-12 13:58:52 -0400366
// Lazily creates the small-path atlas manager and registers it as an on-flush
// callback. Returns nullptr when the atlas cannot be initialized (the manager
// object itself is kept for future attempts).
GrSmallPathAtlasMgr* GrDirectContext::onGetSmallPathAtlasMgr() {
    if (!fSmallPathAtlasMgr) {
        fSmallPathAtlasMgr = std::make_unique<GrSmallPathAtlasMgr>();

        this->priv().addOnFlushCallbackObject(fSmallPathAtlasMgr.get());
    }

    // initAtlas is called every time; presumably it is cheap/idempotent once
    // initialized — TODO(review): confirm against GrSmallPathAtlasMgr.
    if (!fSmallPathAtlasMgr->initAtlas(this->proxyProvider(), this->caps())) {
        return nullptr;
    }

    return fSmallPathAtlasMgr.get();
}
380
Adlai Holler3acc69a2020-10-13 08:20:51 -0400381////////////////////////////////////////////////////////////////////////////////
382
// Flushes all pending work to the gpu. On an abandoned context the finished/
// submitted callbacks are still invoked (submitted with success=false) so
// clients can release their resources, and kNo is returned.
GrSemaphoresSubmitted GrDirectContext::flush(const GrFlushInfo& info) {
    ASSERT_SINGLE_OWNER
    if (this->abandoned()) {
        if (info.fFinishedProc) {
            info.fFinishedProc(info.fFinishedContext);
        }
        if (info.fSubmittedProc) {
            info.fSubmittedProc(info.fSubmittedContext, false);
        }
        return GrSemaphoresSubmitted::kNo;
    }

    return this->drawingManager()->flushSurfaces({}, SkSurface::BackendSurfaceAccess::kNoAccess,
                                                 info, nullptr);
}
398
399bool GrDirectContext::submit(bool syncCpu) {
400 ASSERT_SINGLE_OWNER
401 if (this->abandoned()) {
402 return false;
403 }
404
405 if (!fGpu) {
406 return false;
407 }
408
409 return fGpu->submitToGpu(syncCpu);
410}
411
412////////////////////////////////////////////////////////////////////////////////
413
414void GrDirectContext::checkAsyncWorkCompletion() {
415 if (fGpu) {
416 fGpu->checkFinishProcs();
417 }
418}
419
Greg Daniela89b4302021-01-29 10:48:40 -0500420void GrDirectContext::syncAllOutstandingGpuWork(bool shouldExecuteWhileAbandoned) {
421 if (fGpu && (!this->abandoned() || shouldExecuteWhileAbandoned)) {
422 fGpu->finishOutstandingGpuWork();
423 this->checkAsyncWorkCompletion();
424 }
425}
426
Adlai Holler3acc69a2020-10-13 08:20:51 -0400427////////////////////////////////////////////////////////////////////////////////
428
429void GrDirectContext::storeVkPipelineCacheData() {
430 if (fGpu) {
431 fGpu->storeVkPipelineCacheData();
432 }
433}
434
435////////////////////////////////////////////////////////////////////////////////
436
// Whether the shader caps allow distance-field text rendering.
bool GrDirectContext::supportsDistanceFieldText() const {
    return this->caps()->shaderCaps()->supportsDistanceFieldText();
}
440
441//////////////////////////////////////////////////////////////////////////////
442
// Dumps resource-cache memory statistics plus the text-blob cache size into
// the client-provided trace dump.
void GrDirectContext::dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const {
    ASSERT_SINGLE_OWNER
    fResourceCache->dumpMemoryStatistics(traceMemoryDump);
    traceMemoryDump->dumpNumericValue("skia/gr_text_blob_cache", "size", "bytes",
                                      this->getTextBlobCache()->usedBytes());
}
449
// Creates an uninitialized backend texture with the given explicit backend
// format. Returns an invalid GrBackendTexture if the context is abandoned.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    return fGpu->createBackendTexture({width, height}, backendFormat, renderable,
                                      mipMapped, isProtected);
}
463
// Convenience overload: derives the backend format from an SkColorType, then
// delegates to the format-based overload above.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected) {
    if (this->abandoned()) {
        return GrBackendTexture();
    }

    const GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);

    return this->createBackendTexture(width, height, format, mipMapped, renderable, isProtected);
}
477
// Helper: creates a backend texture and clears it to 'color'. On clear failure
// the just-created texture is deleted so no resource leaks. Returns an invalid
// texture on any failure.
static GrBackendTexture create_and_clear_backend_texture(GrDirectContext* dContext,
                                                         SkISize dimensions,
                                                         const GrBackendFormat& backendFormat,
                                                         GrMipmapped mipMapped,
                                                         GrRenderable renderable,
                                                         GrProtected isProtected,
                                                         sk_sp<GrRefCntedCallback> finishedCallback,
                                                         std::array<float, 4> color) {
    GrGpu* gpu = dContext->priv().getGpu();
    GrBackendTexture beTex = gpu->createBackendTexture(dimensions, backendFormat, renderable,
                                                       mipMapped, isProtected);
    if (!beTex.isValid()) {
        return {};
    }

    if (!dContext->priv().getGpu()->clearBackendTexture(beTex,
                                                        std::move(finishedCallback),
                                                        color)) {
        dContext->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
501
// Helper: uploads 'numLevels' pixmap mip levels into an existing backend
// texture by wrapping it in a proxy, writing the pixels through a
// SurfaceContext, and flushing. The color type is taken from src[0]; returns
// false if it is incompatible with the texture's format, if wrapping fails,
// or if the pixel write fails.
static bool update_texture_with_pixmaps(GrDirectContext* context,
                                        const SkPixmap src[],
                                        int numLevels,
                                        const GrBackendTexture& backendTexture,
                                        GrSurfaceOrigin textureOrigin,
                                        sk_sp<GrRefCntedCallback> finishedCallback) {
    GrColorType ct = SkColorTypeToGrColorType(src[0].colorType());
    const GrBackendFormat& format = backendTexture.getBackendFormat();

    if (!context->priv().caps()->areColorTypeAndFormatCompatible(ct, format)) {
        return false;
    }

    // Borrowed wrap: the client retains ownership of the backend texture.
    auto proxy = context->priv().proxyProvider()->wrapBackendTexture(backendTexture,
                                                                     kBorrow_GrWrapOwnership,
                                                                     GrWrapCacheable::kNo,
                                                                     kRW_GrIOType,
                                                                     std::move(finishedCallback));
    if (!proxy) {
        return false;
    }

    GrSwizzle swizzle = context->priv().caps()->getReadSwizzle(format, ct);
    GrSurfaceProxyView view(std::move(proxy), textureOrigin, swizzle);
    skgpu::SurfaceContext surfaceContext(context, std::move(view), src[0].info().colorInfo());
    // Repackage the SkPixmaps as GrCPixmaps for the write call (stack storage
    // for up to 15 levels before heap fallback).
    SkAutoSTArray<15, GrCPixmap> tmpSrc(numLevels);
    for (int i = 0; i < numLevels; ++i) {
        tmpSrc[i] = src[i];
    }
    if (!surfaceContext.writePixels(context, tmpSrc.get(), numLevels)) {
        return false;
    }

    GrSurfaceProxy* p = surfaceContext.asSurfaceProxy();
    GrFlushInfo info;
    context->priv().drawingManager()->flushSurfaces({&p, 1},
                                                    SkSurface::BackendSurfaceAccess::kNoAccess,
                                                    info,
                                                    nullptr);
    return true;
}
543
// Creates a backend texture with an explicit format and clears it to 'color'.
// The finished proc is wrapped in a ref-counted callback so it fires even on
// the early-abandoned return path (when the callback is destroyed unused —
// per GrRefCntedCallback semantics).
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       const GrBackendFormat& backendFormat,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (this->abandoned()) {
        return {};
    }

    return create_and_clear_backend_texture(this,
                                            {width, height},
                                            backendFormat,
                                            mipMapped,
                                            renderable,
                                            isProtected,
                                            std::move(finishedCallback),
                                            color.array());
}
568
// Creates a backend texture from an SkColorType and clears it to 'color'. The
// color is pre-swizzled with the format's write swizzle so the cleared texels
// read back as the requested color.
GrBackendTexture GrDirectContext::createBackendTexture(int width, int height,
                                                       SkColorType skColorType,
                                                       const SkColor4f& color,
                                                       GrMipmapped mipMapped,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    GrBackendFormat format = this->defaultBackendFormat(skColorType, renderable);
    if (!format.isValid()) {
        return {};
    }

    GrColorType grColorType = SkColorTypeToGrColorType(skColorType);
    SkColor4f swizzledColor = this->caps()->getWriteSwizzle(format, grColorType).applyTo(color);

    return create_and_clear_backend_texture(this,
                                            {width, height},
                                            format,
                                            mipMapped,
                                            renderable,
                                            isProtected,
                                            std::move(finishedCallback),
                                            swizzledColor.array());
}
600
// Creates a backend texture sized/typed after srcData[0] and initializes it
// from the provided pixmap levels. Providing more than one level implies a
// mipmapped texture. On upload failure the new texture is deleted before
// returning an invalid handle.
GrBackendTexture GrDirectContext::createBackendTexture(const SkPixmap srcData[],
                                                       int numProvidedLevels,
                                                       GrSurfaceOrigin textureOrigin,
                                                       GrRenderable renderable,
                                                       GrProtected isProtected,
                                                       GrGpuFinishedProc finishedProc,
                                                       GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return {};
    }

    if (!srcData || numProvidedLevels <= 0) {
        return {};
    }

    SkColorType colorType = srcData[0].colorType();

    GrMipmapped mipMapped = GrMipmapped::kNo;
    if (numProvidedLevels > 1) {
        mipMapped = GrMipmapped::kYes;
    }

    GrBackendFormat backendFormat = this->defaultBackendFormat(colorType, renderable);
    GrBackendTexture beTex = this->createBackendTexture(srcData[0].width(),
                                                        srcData[0].height(),
                                                        backendFormat,
                                                        mipMapped,
                                                        renderable,
                                                        isProtected);
    if (!beTex.isValid()) {
        return {};
    }
    if (!update_texture_with_pixmaps(this,
                                     srcData,
                                     numProvidedLevels,
                                     beTex,
                                     textureOrigin,
                                     std::move(finishedCallback))) {
        this->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
648
// Clears an existing backend texture to 'color' (no swizzle applied — the raw
// color values are used). Returns false if the context is abandoned.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    return fGpu->clearBackendTexture(backendTexture, std::move(finishedCallback), color.array());
}
661
// Clears an existing backend texture to 'color', interpreting the texture via
// 'skColorType' and applying the format's write swizzle first. Fails if the
// color type and backend format are incompatible.
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           SkColorType skColorType,
                                           const SkColor4f& color,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    GrBackendFormat format = backendTexture.getBackendFormat();
    GrColorType grColorType = SkColorTypeAndFormatToGrColorType(this->caps(), skColorType, format);

    if (!this->caps()->areColorTypeAndFormatCompatible(grColorType, format)) {
        return false;
    }

    GrSwizzle swizzle = this->caps()->getWriteSwizzle(format, grColorType);
    SkColor4f swizzledColor = swizzle.applyTo(color);

    return fGpu->clearBackendTexture(backendTexture,
                                     std::move(finishedCallback),
                                     swizzledColor.array());
}
687
// Re-uploads pixel data into an existing backend texture. If the texture is
// mipmapped, the caller must supply the complete mip chain (level count is
// validated against the texture's dimensions).
bool GrDirectContext::updateBackendTexture(const GrBackendTexture& backendTexture,
                                           const SkPixmap srcData[],
                                           int numLevels,
                                           GrSurfaceOrigin textureOrigin,
                                           GrGpuFinishedProc finishedProc,
                                           GrGpuFinishedContext finishedContext) {
    auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);

    if (this->abandoned()) {
        return false;
    }

    if (!srcData || numLevels <= 0) {
        return false;
    }

    // If the texture has MIP levels then we require that the full set is overwritten.
    int numExpectedLevels = 1;
    if (backendTexture.hasMipmaps()) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(backendTexture.width(),
                                                        backendTexture.height()) + 1;
    }
    if (numLevels != numExpectedLevels) {
        return false;
    }
    return update_texture_with_pixmaps(this,
                                       srcData,
                                       numLevels,
                                       backendTexture,
                                       textureOrigin,
                                       std::move(finishedCallback));
}
720
Adlai Holler64e13832020-10-13 08:21:56 -0400721//////////////////////////////////////////////////////////////////////////////
722
// Helper: creates a compressed backend texture and uploads 'size' bytes of
// compressed payload into it. On upload failure the new texture is deleted.
// Returns an invalid texture on any failure.
static GrBackendTexture create_and_update_compressed_backend_texture(
        GrDirectContext* dContext,
        SkISize dimensions,
        const GrBackendFormat& backendFormat,
        GrMipmapped mipMapped,
        GrProtected isProtected,
        sk_sp<GrRefCntedCallback> finishedCallback,
        const void* data,
        size_t size) {
    GrGpu* gpu = dContext->priv().getGpu();

    GrBackendTexture beTex = gpu->createCompressedBackendTexture(dimensions, backendFormat,
                                                                 mipMapped, isProtected);
    if (!beTex.isValid()) {
        return {};
    }

    if (!dContext->priv().getGpu()->updateCompressedBackendTexture(
                beTex, std::move(finishedCallback), data, size)) {
        dContext->deleteBackendTexture(beTex);
        return {};
    }
    return beTex;
}
747
Brian Salomon71283232021-04-08 12:45:58 -0400748GrBackendTexture GrDirectContext::createCompressedBackendTexture(
749 int width, int height,
750 const GrBackendFormat& backendFormat,
751 const SkColor4f& color,
752 GrMipmapped mipmapped,
753 GrProtected isProtected,
754 GrGpuFinishedProc finishedProc,
755 GrGpuFinishedContext finishedContext) {
Adlai Holler64e13832020-10-13 08:21:56 -0400756 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
Brian Salomon694ff172020-11-04 16:54:28 -0500757 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400758
759 if (this->abandoned()) {
760 return {};
761 }
762
Brian Salomon71283232021-04-08 12:45:58 -0400763 SkImage::CompressionType compression = GrBackendFormatToCompressionType(backendFormat);
764 if (compression == SkImage::CompressionType::kNone) {
765 return {};
766 }
767
768 size_t size = SkCompressedDataSize(compression,
769 {width, height},
770 nullptr,
771 mipmapped == GrMipmapped::kYes);
772 auto storage = std::make_unique<char[]>(size);
773 GrFillInCompressedData(compression, {width, height}, mipmapped, storage.get(), color);
774 return create_and_update_compressed_backend_texture(this,
775 {width, height},
776 backendFormat,
777 mipmapped,
778 isProtected,
779 std::move(finishedCallback),
780 storage.get(),
781 size);
Adlai Holler64e13832020-10-13 08:21:56 -0400782}
783
Brian Salomon71283232021-04-08 12:45:58 -0400784GrBackendTexture GrDirectContext::createCompressedBackendTexture(
785 int width, int height,
786 SkImage::CompressionType compression,
787 const SkColor4f& color,
788 GrMipmapped mipMapped,
789 GrProtected isProtected,
790 GrGpuFinishedProc finishedProc,
791 GrGpuFinishedContext finishedContext) {
Adlai Holler64e13832020-10-13 08:21:56 -0400792 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
793 GrBackendFormat format = this->compressedBackendFormat(compression);
794 return this->createCompressedBackendTexture(width, height, format, color,
795 mipMapped, isProtected, finishedProc,
796 finishedContext);
797}
798
Brian Salomon71283232021-04-08 12:45:58 -0400799GrBackendTexture GrDirectContext::createCompressedBackendTexture(
800 int width, int height,
801 const GrBackendFormat& backendFormat,
802 const void* compressedData,
803 size_t dataSize,
804 GrMipmapped mipMapped,
805 GrProtected isProtected,
806 GrGpuFinishedProc finishedProc,
807 GrGpuFinishedContext finishedContext) {
Adlai Holler64e13832020-10-13 08:21:56 -0400808 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
Brian Salomon694ff172020-11-04 16:54:28 -0500809 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400810
811 if (this->abandoned()) {
812 return {};
813 }
814
Brian Salomon71283232021-04-08 12:45:58 -0400815 return create_and_update_compressed_backend_texture(this,
816 {width, height},
817 backendFormat,
818 mipMapped,
819 isProtected,
820 std::move(finishedCallback),
821 compressedData,
822 dataSize);
Adlai Holler64e13832020-10-13 08:21:56 -0400823}
824
Brian Salomon71283232021-04-08 12:45:58 -0400825GrBackendTexture GrDirectContext::createCompressedBackendTexture(
826 int width, int height,
827 SkImage::CompressionType compression,
828 const void* data, size_t dataSize,
829 GrMipmapped mipMapped,
830 GrProtected isProtected,
831 GrGpuFinishedProc finishedProc,
832 GrGpuFinishedContext finishedContext) {
Adlai Holler64e13832020-10-13 08:21:56 -0400833 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
834 GrBackendFormat format = this->compressedBackendFormat(compression);
835 return this->createCompressedBackendTexture(width, height, format, data, dataSize, mipMapped,
836 isProtected, finishedProc, finishedContext);
837}
838
839bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
840 const SkColor4f& color,
841 GrGpuFinishedProc finishedProc,
842 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500843 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400844
845 if (this->abandoned()) {
846 return false;
847 }
848
Brian Salomon71283232021-04-08 12:45:58 -0400849 SkImage::CompressionType compression =
850 GrBackendFormatToCompressionType(backendTexture.getBackendFormat());
851 if (compression == SkImage::CompressionType::kNone) {
852 return {};
853 }
854 size_t size = SkCompressedDataSize(compression,
855 backendTexture.dimensions(),
856 nullptr,
857 backendTexture.hasMipmaps());
858 SkAutoMalloc storage(size);
859 GrFillInCompressedData(compression,
860 backendTexture.dimensions(),
861 backendTexture.mipmapped(),
862 static_cast<char*>(storage.get()),
863 color);
864 return fGpu->updateCompressedBackendTexture(backendTexture,
865 std::move(finishedCallback),
866 storage.get(),
867 size);
Adlai Holler64e13832020-10-13 08:21:56 -0400868}
869
870bool GrDirectContext::updateCompressedBackendTexture(const GrBackendTexture& backendTexture,
871 const void* compressedData,
872 size_t dataSize,
873 GrGpuFinishedProc finishedProc,
874 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500875 auto finishedCallback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler64e13832020-10-13 08:21:56 -0400876
877 if (this->abandoned()) {
878 return false;
879 }
880
881 if (!compressedData) {
882 return false;
883 }
884
Brian Salomon71283232021-04-08 12:45:58 -0400885 return fGpu->updateCompressedBackendTexture(backendTexture,
886 std::move(finishedCallback),
887 compressedData,
888 dataSize);
Adlai Holler64e13832020-10-13 08:21:56 -0400889}
890
Adlai Holler6d0745b2020-10-13 13:29:00 -0400891//////////////////////////////////////////////////////////////////////////////
892
893bool GrDirectContext::setBackendTextureState(const GrBackendTexture& backendTexture,
894 const GrBackendSurfaceMutableState& state,
895 GrBackendSurfaceMutableState* previousState,
896 GrGpuFinishedProc finishedProc,
897 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500898 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400899
900 if (this->abandoned()) {
901 return false;
902 }
903
904 return fGpu->setBackendTextureState(backendTexture, state, previousState, std::move(callback));
905}
906
907
908bool GrDirectContext::setBackendRenderTargetState(const GrBackendRenderTarget& backendRenderTarget,
909 const GrBackendSurfaceMutableState& state,
910 GrBackendSurfaceMutableState* previousState,
911 GrGpuFinishedProc finishedProc,
912 GrGpuFinishedContext finishedContext) {
Brian Salomon694ff172020-11-04 16:54:28 -0500913 auto callback = GrRefCntedCallback::Make(finishedProc, finishedContext);
Adlai Holler6d0745b2020-10-13 13:29:00 -0400914
915 if (this->abandoned()) {
916 return false;
917 }
918
919 return fGpu->setBackendRenderTargetState(backendRenderTarget, state, previousState,
920 std::move(callback));
921}
922
923void GrDirectContext::deleteBackendTexture(GrBackendTexture backendTex) {
924 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
925 // For the Vulkan backend we still must destroy the backend texture when the context is
926 // abandoned.
927 if ((this->abandoned() && this->backend() != GrBackendApi::kVulkan) || !backendTex.isValid()) {
928 return;
929 }
930
931 fGpu->deleteBackendTexture(backendTex);
932}
933
934//////////////////////////////////////////////////////////////////////////////
935
// Hands a (key, data) pair — e.g. retrieved from a persistent shader cache — to the
// backend GrGpu so it can compile the program ahead of first use. Returns whatever
// GrGpu::precompileShader reports.
bool GrDirectContext::precompileShader(const SkData& key, const SkData& data) {
    return fGpu->precompileShader(key, data);
}
939
940#ifdef SK_ENABLE_DUMP_GPU
941#include "include/core/SkString.h"
942#include "src/utils/SkJSONWriter.h"
// Debug-only (SK_ENABLE_DUMP_GPU): serializes the context — backend name, caps,
// GrGpu state, and context state — as a pretty-printed JSON object and returns it
// as an SkString.
SkString GrDirectContext::dump() const {
    SkDynamicMemoryWStream stream;
    SkJSONWriter writer(&stream, SkJSONWriter::Mode::kPretty);
    writer.beginObject();

    writer.appendString("backend", GrBackendApiToStr(this->backend()));

    writer.appendName("caps");
    this->caps()->dumpJSON(&writer);

    writer.appendName("gpu");
    this->fGpu->dumpJSON(&writer);

    writer.appendName("context");
    this->dumpJSON(&writer);

    // Flush JSON to the memory stream
    writer.endObject();
    writer.flush();

    // Null terminate the JSON data in the memory stream
    stream.write8(0);

    // Allocate a string big enough to hold all the data, then copy out of the stream
    SkString result(stream.bytesWritten());
    stream.copyToAndReset(result.writable_str());
    return result;
}
971#endif
972
John Rosascoa9b348f2019-11-08 13:18:15 -0800973#ifdef SK_GL
Robert Phillipsc7228c62020-07-14 12:57:39 -0400974
Robert Phillipsf4f80112020-07-13 16:13:31 -0400975/*************************************************************************************************/
976sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface) {
Robert Phillipsa3457b82018-03-08 11:30:12 -0500977 GrContextOptions defaultOptions;
Jim Van Verth03b8ab22020-02-24 11:36:15 -0500978 return MakeGL(std::move(glInterface), defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -0500979}
980
Robert Phillipsf4f80112020-07-13 16:13:31 -0400981sk_sp<GrDirectContext> GrDirectContext::MakeGL(const GrContextOptions& options) {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400982 return MakeGL(nullptr, options);
983}
984
Robert Phillipsf4f80112020-07-13 16:13:31 -0400985sk_sp<GrDirectContext> GrDirectContext::MakeGL() {
Brian Salomonc1b9c102018-04-06 09:18:00 -0400986 GrContextOptions defaultOptions;
987 return MakeGL(nullptr, defaultOptions);
988}
989
Brian Salomon24069eb2020-06-24 10:19:52 -0400990#if GR_TEST_UTILS
// Test-only helper: wraps a glGetError implementation so that roughly 1 in 300
// otherwise-successful calls reports GR_GL_OUT_OF_MEMORY, exercising the driver-OOM
// handling paths. Used when GrContextOptions::fRandomGLOOM is set.
GrGLFunction<GrGLGetErrorFn> make_get_error_with_random_oom(GrGLFunction<GrGLGetErrorFn> original) {
    // A SkRandom and a GrGLFunction<GrGLGetErrorFn> are too big to be captured by a
    // GrGLFunction<GrGLGetError> (surprise, surprise). So we make a context object and
    // capture that by pointer. However, GrGLFunction doesn't support calling a destructor
    // on the thing it captures. So we leak the context.
    struct GetErrorContext {
        SkRandom fRandom;
        GrGLFunction<GrGLGetErrorFn> fGetError;
    };

    auto errorContext = new GetErrorContext;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
    // The leak is deliberate (see above); tell LeakSanitizer to ignore it.
    __lsan_ignore_object(errorContext);
#endif

    errorContext->fGetError = original;

    return GrGLFunction<GrGLGetErrorFn>([errorContext]() {
        GrGLenum error = errorContext->fGetError();
        // Only inject OOM where the real call succeeded, so genuine errors pass through.
        if (error == GR_GL_NO_ERROR && (errorContext->fRandom.nextU() % 300) == 0) {
            error = GR_GL_OUT_OF_MEMORY;
        }
        return error;
    });
}
1017#endif
1018
// Primary GL factory: builds a GrDirectContext over `glInterface` (null => native
// interface) with the given options. Returns nullptr if GrGLGpu creation or context
// init fails.
sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface,
                                               const GrContextOptions& options) {
    sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kOpenGL, options));
#if GR_TEST_UTILS
    if (options.fRandomGLOOM) {
        // Testing hook: swap in a glGetError that randomly reports OOM.
        // NOTE(review): this dereferences glInterface, so it assumes a non-null
        // interface whenever fRandomGLOOM is set — confirm test callers guarantee that.
        auto copy = sk_make_sp<GrGLInterface>(*glInterface);
        copy->fFunctions.fGetError =
                make_get_error_with_random_oom(glInterface->fFunctions.fGetError);
#if GR_GL_CHECK_ERROR
        // Suppress logging GL errors since we'll be synthetically generating them.
        copy->suppressErrorLogging();
#endif
        glInterface = std::move(copy);
    }
#endif
    direct->fGpu = GrGLGpu::Make(std::move(glInterface), options, direct.get());
    if (!direct->init()) {
        return nullptr;
    }
    return direct;
}
John Rosascoa9b348f2019-11-08 13:18:15 -08001040#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001041
Robert Phillipsf4f80112020-07-13 16:13:31 -04001042/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001043sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions) {
1044 GrContextOptions defaultOptions;
1045 return MakeMock(mockOptions, defaultOptions);
1046}
1047
1048sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions,
1049 const GrContextOptions& options) {
1050 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMock, options));
1051
1052 direct->fGpu = GrMockGpu::Make(mockOptions, options, direct.get());
1053 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001054 return nullptr;
1055 }
Chris Daltona378b452019-12-11 13:24:11 -05001056
Robert Phillipsf4f80112020-07-13 16:13:31 -04001057 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001058}
1059
Greg Danielb4d89562018-10-03 18:44:49 +00001060#ifdef SK_VULKAN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001061/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001062sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext) {
1063 GrContextOptions defaultOptions;
1064 return MakeVulkan(backendContext, defaultOptions);
1065}
1066
1067sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext,
1068 const GrContextOptions& options) {
1069 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kVulkan, options));
1070
1071 direct->fGpu = GrVkGpu::Make(backendContext, options, direct.get());
1072 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001073 return nullptr;
1074 }
1075
Robert Phillipsf4f80112020-07-13 16:13:31 -04001076 return direct;
Greg Danielb4d89562018-10-03 18:44:49 +00001077}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001078#endif
Robert Phillipsa3457b82018-03-08 11:30:12 -05001079
1080#ifdef SK_METAL
Robert Phillipsf4f80112020-07-13 16:13:31 -04001081/*************************************************************************************************/
Jim Van Verth351c9b52020-11-12 15:21:11 -05001082sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001083 GrContextOptions defaultOptions;
Jim Van Verth351c9b52020-11-12 15:21:11 -05001084 return MakeMetal(backendContext, defaultOptions);
Robert Phillipsa3457b82018-03-08 11:30:12 -05001085}
1086
Jim Van Verth351c9b52020-11-12 15:21:11 -05001087sk_sp<GrDirectContext> GrDirectContext::MakeMetal(const GrMtlBackendContext& backendContext,
1088 const GrContextOptions& options) {
Robert Phillipsf4f80112020-07-13 16:13:31 -04001089 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
Robert Phillipsa3457b82018-03-08 11:30:12 -05001090
Jim Van Verth351c9b52020-11-12 15:21:11 -05001091 direct->fGpu = GrMtlTrampoline::MakeGpu(backendContext, options, direct.get());
Robert Phillipsf4f80112020-07-13 16:13:31 -04001092 if (!direct->init()) {
Robert Phillipsa3457b82018-03-08 11:30:12 -05001093 return nullptr;
1094 }
Timothy Liang4e85e802018-06-28 16:37:18 -04001095
Robert Phillipsf4f80112020-07-13 16:13:31 -04001096 return direct;
Robert Phillipsa3457b82018-03-08 11:30:12 -05001097}
Jim Van Verth351c9b52020-11-12 15:21:11 -05001098
1099// deprecated
1100sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue) {
1101 GrContextOptions defaultOptions;
1102 return MakeMetal(device, queue, defaultOptions);
1103}
1104
1105// deprecated
1106// remove include/gpu/mtl/GrMtlBackendContext.h, above, when removed
1107sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue,
1108 const GrContextOptions& options) {
1109 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options));
1110 GrMtlBackendContext backendContext = {};
1111 backendContext.fDevice.reset(device);
1112 backendContext.fQueue.reset(queue);
1113
1114 return GrDirectContext::MakeMetal(backendContext, options);
1115}
Robert Phillipsa3457b82018-03-08 11:30:12 -05001116#endif
1117
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001118#ifdef SK_DIRECT3D
Robert Phillipsf4f80112020-07-13 16:13:31 -04001119/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001120sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext) {
1121 GrContextOptions defaultOptions;
1122 return MakeDirect3D(backendContext, defaultOptions);
1123}
1124
1125sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext,
1126 const GrContextOptions& options) {
1127 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDirect3D, options));
1128
1129 direct->fGpu = GrD3DGpu::Make(backendContext, options, direct.get());
1130 if (!direct->init()) {
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -05001131 return nullptr;
1132 }
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001133
Robert Phillipsf4f80112020-07-13 16:13:31 -04001134 return direct;
Jim Van Verthb01e12b2020-02-18 14:34:38 -05001135}
1136#endif
1137
Stephen White985741a2019-07-18 11:43:45 -04001138#ifdef SK_DAWN
Robert Phillipsf4f80112020-07-13 16:13:31 -04001139/*************************************************************************************************/
Robert Phillipsf4f80112020-07-13 16:13:31 -04001140sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device) {
Stephen White985741a2019-07-18 11:43:45 -04001141 GrContextOptions defaultOptions;
1142 return MakeDawn(device, defaultOptions);
1143}
1144
Robert Phillipsf4f80112020-07-13 16:13:31 -04001145sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device,
1146 const GrContextOptions& options) {
1147 sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDawn, options));
Stephen White985741a2019-07-18 11:43:45 -04001148
Robert Phillipsf4f80112020-07-13 16:13:31 -04001149 direct->fGpu = GrDawnGpu::Make(device, options, direct.get());
1150 if (!direct->init()) {
Stephen White985741a2019-07-18 11:43:45 -04001151 return nullptr;
1152 }
1153
Robert Phillipsf4f80112020-07-13 16:13:31 -04001154 return direct;
Stephen White985741a2019-07-18 11:43:45 -04001155}
Robert Phillipsf4f80112020-07-13 16:13:31 -04001156
Stephen White985741a2019-07-18 11:43:45 -04001157#endif