blob: 66703bd69f21521a29f261c835301d9b4031ef70 [file] [log] [blame]
Robert Phillipsad248452020-06-30 09:27:52 -04001/*
2 * Copyright 2020 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8#ifndef GrDirectContext_DEFINED
9#define GrDirectContext_DEFINED
10
Adlai Holler9ae860a2020-10-20 10:13:32 -040011#include "include/gpu/GrRecordingContext.h"
Robert Phillipsad248452020-06-30 09:27:52 -040012
Adlai Holler6d0745b2020-10-13 13:29:00 -040013#include "include/gpu/GrBackendSurface.h"
14
15// We shouldn't need this but currently Android is relying on this being include transitively.
16#include "include/core/SkUnPreMultiply.h"
17
Adlai Holler53cf44c2020-10-13 17:40:21 -040018class GrAtlasManager;
Adlai Holler6d0745b2020-10-13 13:29:00 -040019class GrBackendSemaphore;
Adlai Holler53cf44c2020-10-13 17:40:21 -040020class GrClientMappedBufferManager;
Adlai Hollera0693042020-10-14 11:23:11 -040021class GrDirectContextPriv;
Adlai Holler6d0745b2020-10-13 13:29:00 -040022class GrContextThreadSafeProxy;
23struct GrD3DBackendContext;
24class GrFragmentProcessor;
Adlai Holler53cf44c2020-10-13 17:40:21 -040025class GrGpu;
Adlai Holler6d0745b2020-10-13 13:29:00 -040026struct GrGLInterface;
Jim Van Verth351c9b52020-11-12 15:21:11 -050027struct GrMtlBackendContext;
Adlai Holler6d0745b2020-10-13 13:29:00 -040028struct GrMockOptions;
29class GrPath;
Adlai Holler53cf44c2020-10-13 17:40:21 -040030class GrResourceCache;
31class GrSmallPathAtlasMgr;
Adlai Holler53cf44c2020-10-13 17:40:21 -040032class GrResourceProvider;
33class GrStrikeCache;
Adlai Holler6d0745b2020-10-13 13:29:00 -040034class GrSurfaceProxy;
35class GrSwizzle;
36class GrTextureProxy;
37struct GrVkBackendContext;
38
39class SkImage;
40class SkString;
41class SkSurfaceCharacterization;
42class SkSurfaceProps;
Adlai Holler53cf44c2020-10-13 17:40:21 -040043class SkTaskGroup;
Adlai Holler6d0745b2020-10-13 13:29:00 -040044class SkTraceMemoryDump;
Robert Phillipsad248452020-06-30 09:27:52 -040045
Adlai Holler9ae860a2020-10-20 10:13:32 -040046class SK_API GrDirectContext : public GrRecordingContext {
Robert Phillipsad248452020-06-30 09:27:52 -040047public:
Robert Phillipsf4f80112020-07-13 16:13:31 -040048#ifdef SK_GL
49 /**
50 * Creates a GrDirectContext for a backend context. If no GrGLInterface is provided then the
51 * result of GrGLMakeNativeInterface() is used if it succeeds.
52 */
53 static sk_sp<GrDirectContext> MakeGL(sk_sp<const GrGLInterface>, const GrContextOptions&);
54 static sk_sp<GrDirectContext> MakeGL(sk_sp<const GrGLInterface>);
55 static sk_sp<GrDirectContext> MakeGL(const GrContextOptions&);
56 static sk_sp<GrDirectContext> MakeGL();
57#endif
58
59#ifdef SK_VULKAN
60 /**
61 * The Vulkan context (VkQueue, VkDevice, VkInstance) must be kept alive until the returned
62 * GrDirectContext is destroyed. This also means that any objects created with this
63 * GrDirectContext (e.g. SkSurfaces, SkImages, etc.) must also be released as they may hold
64 * refs on the GrDirectContext. Once all these objects and the GrDirectContext are released,
65 * then it is safe to delete the vulkan objects.
66 */
67 static sk_sp<GrDirectContext> MakeVulkan(const GrVkBackendContext&, const GrContextOptions&);
68 static sk_sp<GrDirectContext> MakeVulkan(const GrVkBackendContext&);
69#endif
70
71#ifdef SK_METAL
72 /**
Jim Van Verth351c9b52020-11-12 15:21:11 -050073 * Makes a GrDirectContext which uses Metal as the backend. The GrMtlBackendContext contains a
74 * MTLDevice and MTLCommandQueue which should be used by the backend. These objects must
75 * have their own ref which will be released when the GrMtlBackendContext is destroyed.
76 * Ganesh will take its own ref on the objects which will be released when the GrDirectContext
77 * is destroyed.
78 */
79 static sk_sp<GrDirectContext> MakeMetal(const GrMtlBackendContext&, const GrContextOptions&);
80 static sk_sp<GrDirectContext> MakeMetal(const GrMtlBackendContext&);
81 /**
82 * Deprecated.
83 *
Robert Phillipsf4f80112020-07-13 16:13:31 -040084 * Makes a GrDirectContext which uses Metal as the backend. The device parameter is an
85 * MTLDevice and queue is an MTLCommandQueue which should be used by the backend. These objects
Jim Van Verth351c9b52020-11-12 15:21:11 -050086 * must have a ref on them that can be transferred to Ganesh, which will release the ref
Robert Phillipsf4f80112020-07-13 16:13:31 -040087 * when the GrDirectContext is destroyed.
88 */
89 static sk_sp<GrDirectContext> MakeMetal(void* device, void* queue, const GrContextOptions&);
90 static sk_sp<GrDirectContext> MakeMetal(void* device, void* queue);
91#endif
92
93#ifdef SK_DIRECT3D
94 /**
95 * Makes a GrDirectContext which uses Direct3D as the backend. The Direct3D context
96 * must be kept alive until the returned GrDirectContext is first destroyed or abandoned.
97 */
98 static sk_sp<GrDirectContext> MakeDirect3D(const GrD3DBackendContext&, const GrContextOptions&);
99 static sk_sp<GrDirectContext> MakeDirect3D(const GrD3DBackendContext&);
100#endif
101
102#ifdef SK_DAWN
103 static sk_sp<GrDirectContext> MakeDawn(const wgpu::Device&,
104 const GrContextOptions&);
105 static sk_sp<GrDirectContext> MakeDawn(const wgpu::Device&);
106#endif
107
108 static sk_sp<GrDirectContext> MakeMock(const GrMockOptions*, const GrContextOptions&);
109 static sk_sp<GrDirectContext> MakeMock(const GrMockOptions*);
Robert Phillipsad248452020-06-30 09:27:52 -0400110
111 ~GrDirectContext() override;
112
Adlai Hollera7a40442020-10-09 09:49:42 -0400113 /**
114 * The context normally assumes that no outsider is setting state
115 * within the underlying 3D API's context/device/whatever. This call informs
116 * the context that the state was modified and it should resend. Shouldn't
117 * be called frequently for good performance.
118 * The flag bits, state, is dependent on which backend is used by the
119 * context, either GL or D3D (possible in future).
120 */
121 void resetContext(uint32_t state = kAll_GrBackendState);
122
123 /**
124 * If the backend is GrBackendApi::kOpenGL, then all texture unit/target combinations for which
125 * the context has modified the bound texture will have texture id 0 bound. This does not
126 * flush the context. Calling resetContext() does not change the set that will be bound
127 * to texture id 0 on the next call to resetGLTextureBindings(). After this is called
128 * all unit/target combinations are considered to have unmodified bindings until the context
129 * subsequently modifies them (meaning if this is called twice in a row with no intervening
130 * context usage then the second call is a no-op.)
131 */
132 void resetGLTextureBindings();
133
134 /**
135 * Abandons all GPU resources and assumes the underlying backend 3D API context is no longer
136 * usable. Call this if you have lost the associated GPU context, and thus internal texture,
137 * buffer, etc. references/IDs are now invalid. Calling this ensures that the destructors of the
Adlai Holler98dd0042020-10-13 10:04:00 -0400138 * context and any of its created resource objects will not make backend 3D API calls. Content
Adlai Hollera7a40442020-10-09 09:49:42 -0400139 * rendered but not previously flushed may be lost. After this function is called all subsequent
Adlai Holler98dd0042020-10-13 10:04:00 -0400140 * calls on the context will fail or be no-ops.
Adlai Hollera7a40442020-10-09 09:49:42 -0400141 *
142 * The typical use case for this function is that the underlying 3D context was lost and further
143 * API calls may crash.
144 *
145 * For Vulkan, even if the device becomes lost, the VkQueue, VkDevice, or VkInstance used to
146 * create the context must be kept alive even after abandoning the context. Those objects must
147 * live for the lifetime of the context object itself. The reason for this is so that
148 * we can continue to delete any outstanding GrBackendTextures/RenderTargets which must be
149 * cleaned up even in a device lost state.
150 */
Robert Phillipsad248452020-06-30 09:27:52 -0400151 void abandonContext() override;
152
Adlai Hollera7a40442020-10-09 09:49:42 -0400153 /**
154 * Returns true if the context was abandoned or if the backend specific context has
155 * gotten into an unrecoverable, lost state (e.g. in Vulkan backend if we've gotten a
Adlai Holler64e13832020-10-13 08:21:56 -0400156 * VK_ERROR_DEVICE_LOST). If the backend context is lost, this call will also abandon this
157 * context.
Adlai Hollera7a40442020-10-09 09:49:42 -0400158 */
159 bool abandoned() override;
160
Adlai Holler61a591c2020-10-12 12:38:33 -0400161 // TODO: Remove this from public after migrating Chrome.
162 sk_sp<GrContextThreadSafeProxy> threadSafeProxy();
163
164 /**
165 * Checks if the underlying 3D API reported an out-of-memory error. If this returns true it is
166 * reset and will return false until another out-of-memory error is reported by the 3D API. If
167 * the context is abandoned then this will report false.
168 *
169 * Currently this is implemented for:
170 *
171 * OpenGL [ES] - Note that client calls to glGetError() may swallow GL_OUT_OF_MEMORY errors and
172 * therefore hide the error from Skia. Also, it is not advised to use this in combination with
173 * enabling GrContextOptions::fSkipGLErrorChecks. That option may prevent the context from ever
174 * checking the GL context for OOM.
175 *
176 * Vulkan - Reports true if VK_ERROR_OUT_OF_HOST_MEMORY or VK_ERROR_OUT_OF_DEVICE_MEMORY has
177 * occurred.
178 */
179 bool oomed();
180
181 /**
182 * This is similar to abandonContext() however the underlying 3D context is not yet lost and
183 * the context will cleanup all allocated resources before returning. After returning it will
184 * assume that the underlying context may no longer be valid.
185 *
186 * The typical use case for this function is that the client is going to destroy the 3D context
187 * but can't guarantee that context will be destroyed first (perhaps because it may be ref'ed
188 * elsewhere by either the client or Skia objects).
189 *
190 * For Vulkan, even if the device becomes lost, the VkQueue, VkDevice, or VkInstance used to
191 * create the context must be alive before calling releaseResourcesAndAbandonContext.
192 */
193 void releaseResourcesAndAbandonContext();
Robert Phillipsad248452020-06-30 09:27:52 -0400194
Adlai Holler3a508e92020-10-12 13:58:01 -0400195 ///////////////////////////////////////////////////////////////////////////
196 // Resource Cache
197
198 /** DEPRECATED
199 * Return the current GPU resource cache limits.
200 *
201 * @param maxResources If non-null, will be set to -1.
202 * @param maxResourceBytes If non-null, returns maximum number of bytes of
203 * video memory that can be held in the cache.
204 */
205 void getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const;
206
207 /**
208 * Return the current GPU resource cache limit in bytes.
209 */
210 size_t getResourceCacheLimit() const;
211
212 /**
213 * Gets the current GPU resource cache usage.
214 *
215 * @param resourceCount If non-null, returns the number of resources that are held in the
216 * cache.
217 * @param maxResourceBytes If non-null, returns the total number of bytes of video memory held
218 * in the cache.
219 */
220 void getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const;
221
222 /**
223 * Gets the number of bytes in the cache consumed by purgeable (e.g. unlocked) resources.
224 */
225 size_t getResourceCachePurgeableBytes() const;
226
227 /** DEPRECATED
228 * Specify the GPU resource cache limits. If the current cache exceeds the maxResourceBytes
229 * limit, it will be purged (LRU) to keep the cache within the limit.
230 *
231 * @param maxResources Unused.
232 * @param maxResourceBytes The maximum number of bytes of video memory
233 * that can be held in the cache.
234 */
235 void setResourceCacheLimits(int maxResources, size_t maxResourceBytes);
236
237 /**
238 * Specify the GPU resource cache limit. If the cache currently exceeds this limit,
239 * it will be purged (LRU) to keep the cache within the limit.
240 *
241 * @param maxResourceBytes The maximum number of bytes of video memory
242 * that can be held in the cache.
243 */
244 void setResourceCacheLimit(size_t maxResourceBytes);
245
Adlai Holler4aa4c602020-10-12 13:58:52 -0400246 /**
247 * Frees GPU resources created by the context. Can be called to reduce GPU memory
248 * pressure.
249 */
250 void freeGpuResources();
251
252 /**
253 * Purge GPU resources that haven't been used in the past 'msNotUsed' milliseconds or are
254 * otherwise marked for deletion, regardless of whether the context is under budget.
Michael Ludwig9d1cc052021-06-09 20:49:48 -0400255 *
256 * If 'scratchResourcesOnly' is true all unlocked scratch resources older than 'msNotUsed' will
257 * be purged but the unlocked resources with persistent data will remain. If
258 * 'scratchResourcesOnly' is false then all unlocked resources older than 'msNotUsed' will be
259 * purged.
260 *
261 * @param msNotUsed Only unlocked resources not used in these last milliseconds
262 * will be cleaned up.
263 * @param scratchResourcesOnly If true only unlocked scratch resources will be purged.
Adlai Holler4aa4c602020-10-12 13:58:52 -0400264 */
Michael Ludwig9d1cc052021-06-09 20:49:48 -0400265 void performDeferredCleanup(std::chrono::milliseconds msNotUsed,
266 bool scratchResourcesOnly=false);
Adlai Holler4aa4c602020-10-12 13:58:52 -0400267
268 // Temporary compatibility API for Android.
269 void purgeResourcesNotUsedInMs(std::chrono::milliseconds msNotUsed) {
270 this->performDeferredCleanup(msNotUsed);
271 }
272
273 /**
274 * Purge unlocked resources from the cache until the provided byte count has been reached
275 * or we have purged all unlocked resources. The default policy is to purge in LRU order, but
276 * can be overridden to prefer purging scratch resources (in LRU order) prior to purging other
277 * resource types.
278 *
279 * @param bytesToPurge the desired number of bytes to be purged.
280 * @param preferScratchResources If true scratch resources will be purged prior to other
281 * resource types.
282 */
283 void purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources);
284
285 /**
286 * This entry point is intended for instances where an app has been backgrounded or
287 * suspended.
288 * If 'scratchResourcesOnly' is true all unlocked scratch resources will be purged but the
289 * unlocked resources with persistent data will remain. If 'scratchResourcesOnly' is false
290 * then all unlocked resources will be purged.
291 * In either case, after the unlocked resources are purged a separate pass will be made to
292 * ensure that resource usage is under budget (i.e., even if 'scratchResourcesOnly' is true
293 * some resources with persistent data may be purged to be under budget).
294 *
295 * @param scratchResourcesOnly If true only unlocked scratch resources will be purged prior
296 * to enforcing the budget requirements.
297 */
298 void purgeUnlockedResources(bool scratchResourcesOnly);
299
300 /**
301 * Gets the maximum supported texture size.
302 */
303 using GrRecordingContext::maxTextureSize;
304
305 /**
306 * Gets the maximum supported render target size.
307 */
308 using GrRecordingContext::maxRenderTargetSize;
309
310 /**
311 * Can a SkImage be created with the given color type.
312 */
313 using GrRecordingContext::colorTypeSupportedAsImage;
314
315 /**
316 * Can a SkSurface be created with the given color type. To check whether MSAA is supported
317 * use maxSurfaceSampleCountForColorType().
318 */
319 using GrRecordingContext::colorTypeSupportedAsSurface;
320
321 /**
322 * Gets the maximum supported sample count for a color type. 1 is returned if only non-MSAA
323 * rendering is supported for the color type. 0 is returned if rendering to this color type
324 * is not supported at all.
325 */
326 using GrRecordingContext::maxSurfaceSampleCountForColorType;
Robert Phillipsad248452020-06-30 09:27:52 -0400327
Adlai Holler3acc69a2020-10-13 08:20:51 -0400328 ///////////////////////////////////////////////////////////////////////////
329 // Misc.
330
331 /**
332 * Inserts a list of GPU semaphores that the current GPU-backed API must wait on before
333 * executing any more commands on the GPU. If this call returns false, then the GPU back-end
334 * will not wait on any passed in semaphores, and the client will still own the semaphores,
335 * regardless of the value of deleteSemaphoresAfterWait.
336 *
337 * If deleteSemaphoresAfterWait is false then Skia will not delete the semaphores. In this case
338 * it is the client's responsibility to not destroy or attempt to reuse the semaphores until it
339 * knows that Skia has finished waiting on them. This can be done by using finishedProcs on
340 * flush calls.
341 */
342 bool wait(int numSemaphores, const GrBackendSemaphore* waitSemaphores,
343 bool deleteSemaphoresAfterWait = true);
344
345 /**
346 * Call to ensure all drawing to the context has been flushed and submitted to the underlying 3D
347 * API. This is equivalent to calling GrDirectContext::flush with a default GrFlushInfo followed by
348 * GrDirectContext::submit(syncCpu).
349 */
350 void flushAndSubmit(bool syncCpu = false) {
351 this->flush(GrFlushInfo());
352 this->submit(syncCpu);
353 }
354
355 /**
356 * Call to ensure all drawing to the context has been flushed to underlying 3D API specific
357 * objects. A call to `submit` is always required to ensure work is actually sent to
358 * the gpu. Some specific API details:
359 * GL: Commands are actually sent to the driver, but glFlush is never called. Thus some
360 * sync objects from the flush will not be valid until a submission occurs.
361 *
362 * Vulkan/Metal/D3D/Dawn: Commands are recorded to the backend APIs corresponding command
363 * buffer or encoder objects. However, these objects are not sent to the gpu until a
364 * submission occurs.
365 *
366 * If the return is GrSemaphoresSubmitted::kYes, only initialized GrBackendSemaphores will be
367 * submitted to the gpu during the next submit call (it is possible Skia failed to create a
368 * subset of the semaphores). The client should not wait on these semaphores until after submit
369 * has been called, and must keep them alive until then. If this call returns
370 * GrSemaphoresSubmitted::kNo, the GPU backend will not submit any semaphores to be signaled on
371 * the GPU. Thus the client should not have the GPU wait on any of the semaphores passed in with
372 * the GrFlushInfo. Regardless of whether semaphores were submitted to the GPU or not, the
373 * client is still responsible for deleting any initialized semaphores.
374 * Regardless of semaphore submission the context will still be flushed. It should be
375 * emphasized that a return value of GrSemaphoresSubmitted::kNo does not mean the flush did not
376 * happen. It simply means there were no semaphores submitted to the GPU. A caller should only
377 * take this as a failure if they passed in semaphores to be submitted.
378 */
379 GrSemaphoresSubmitted flush(const GrFlushInfo& info);
380
381 void flush() { this->flush({}); }
382
383 /**
384 * Submit outstanding work to the gpu from all previously un-submitted flushes. The return
385 * value of the submit will indicate whether or not the submission to the GPU was successful.
386 *
387 * If the call returns true, all previously passed in semaphores in flush calls will have been
388 * submitted to the GPU and they can safely be waited on. The caller should wait on those
389 * semaphores or perform some other global synchronization before deleting the semaphores.
390 *
391 * If it returns false, then those same semaphores will not have been submitted and we will not
392 * try to submit them again. The caller is free to delete the semaphores at any time.
393 *
394 * If the syncCpu flag is true this function will return once the gpu has finished with all
395 * submitted work.
396 */
397 bool submit(bool syncCpu = false);
398
399 /**
400 * Checks whether any asynchronous work is complete and if so calls related callbacks.
401 */
402 void checkAsyncWorkCompletion();
403
404 /** Enumerates all cached GPU resources and dumps their memory to traceMemoryDump. */
405 // Chrome is using this!
406 void dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const;
407
408 bool supportsDistanceFieldText() const;
409
410 void storeVkPipelineCacheData();
411
Adlai Holler3acc69a2020-10-13 08:20:51 -0400412 /**
413 * Retrieve the default GrBackendFormat for a given SkColorType and renderability.
414 * It is guaranteed that this backend format will be the one used by the following
415 * SkColorType and SkSurfaceCharacterization-based createBackendTexture methods.
416 *
417 * The caller should check that the returned format is valid.
418 */
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400419 using GrRecordingContext::defaultBackendFormat;
Adlai Holler98dd0042020-10-13 10:04:00 -0400420
421 /**
422 * The explicitly allocated backend texture API allows clients to use Skia to create backend
423 * objects outside of Skia proper (i.e., Skia's caching system will not know about them.)
424 *
425 * It is the client's responsibility to delete all these objects (using deleteBackendTexture)
426 * before deleting the context used to create them. If the backend is Vulkan, the textures must
427 * be deleted before abandoning the context as well. Additionally, clients should only delete
428 * these objects on the thread for which that context is active.
429 *
430 * The client is responsible for ensuring synchronization between different uses
431 * of the backend object (i.e., wrapping it in a surface, rendering to it, deleting the
432 * surface, rewrapping it in a image and drawing the image will require explicit
433 * synchronization on the client's part).
434 */
435
436 /**
437 * If possible, create an uninitialized backend texture. The client should ensure that the
438 * returned backend texture is valid.
439 * For the Vulkan backend the layout of the created VkImage will be:
440 * VK_IMAGE_LAYOUT_UNDEFINED.
441 */
442 GrBackendTexture createBackendTexture(int width, int height,
443 const GrBackendFormat&,
444 GrMipmapped,
445 GrRenderable,
446 GrProtected = GrProtected::kNo);
447
448 /**
449 * If possible, create an uninitialized backend texture. The client should ensure that the
450 * returned backend texture is valid.
451 * If successful, the created backend texture will be compatible with the provided
452 * SkColorType.
453 * For the Vulkan backend the layout of the created VkImage will be:
454 * VK_IMAGE_LAYOUT_UNDEFINED.
455 */
456 GrBackendTexture createBackendTexture(int width, int height,
457 SkColorType,
458 GrMipmapped,
459 GrRenderable,
460 GrProtected = GrProtected::kNo);
461
462 /**
463 * If possible, create a backend texture initialized to a particular color. The client should
464 * ensure that the returned backend texture is valid. The client can pass in a finishedProc
465 * to be notified when the data has been uploaded by the gpu and the texture can be deleted. The
466 * client is required to call `submit` to send the upload work to the gpu. The
467 * finishedProc will always get called even if we failed to create the GrBackendTexture.
468 * For the Vulkan backend the layout of the created VkImage will be:
469 * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
470 */
471 GrBackendTexture createBackendTexture(int width, int height,
472 const GrBackendFormat&,
473 const SkColor4f& color,
474 GrMipmapped,
475 GrRenderable,
476 GrProtected = GrProtected::kNo,
477 GrGpuFinishedProc finishedProc = nullptr,
478 GrGpuFinishedContext finishedContext = nullptr);
479
480 /**
481 * If possible, create a backend texture initialized to a particular color. The client should
482 * ensure that the returned backend texture is valid. The client can pass in a finishedProc
483 * to be notified when the data has been uploaded by the gpu and the texture can be deleted. The
484 * client is required to call `submit` to send the upload work to the gpu. The
485 * finishedProc will always get called even if we failed to create the GrBackendTexture.
486 * If successful, the created backend texture will be compatible with the provided
487 * SkColorType.
488 * For the Vulkan backend the layout of the created VkImage will be:
489 * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
490 */
491 GrBackendTexture createBackendTexture(int width, int height,
492 SkColorType,
493 const SkColor4f& color,
494 GrMipmapped,
495 GrRenderable,
496 GrProtected = GrProtected::kNo,
497 GrGpuFinishedProc finishedProc = nullptr,
498 GrGpuFinishedContext finishedContext = nullptr);
499
500 /**
501 * If possible, create a backend texture initialized with the provided pixmap data. The client
502 * should ensure that the returned backend texture is valid. The client can pass in a
503 * finishedProc to be notified when the data has been uploaded by the gpu and the texture can be
504 * deleted. The client is required to call `submit` to send the upload work to the gpu.
505 * The finishedProc will always get called even if we failed to create the GrBackendTexture.
506 * If successful, the created backend texture will be compatible with the provided
507 * pixmap(s). Compatible, in this case, means that the backend format will be the result
508 * of calling defaultBackendFormat on the base pixmap's colortype. The src data can be deleted
509 * when this call returns.
510 * If numLevels is 1 a non-mipMapped texture will result. If a mipMapped texture is desired
511 * the data for all the mipmap levels must be provided. In the mipmapped case all the
512 * colortypes of the provided pixmaps must be the same. Additionally, all the miplevels
Brian Salomonb5f880a2020-12-07 11:30:16 -0500513 * must be sized correctly (please see SkMipmap::ComputeLevelSize and ComputeLevelCount). The
514 * GrSurfaceOrigin controls whether the pixmap data is vertically flipped in the texture.
Adlai Holler98dd0042020-10-13 10:04:00 -0400515 * Note: the pixmap's alphatypes and colorspaces are ignored.
516 * For the Vulkan backend the layout of the created VkImage will be:
517 * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
518 */
Brian Salomonb5f880a2020-12-07 11:30:16 -0500519 GrBackendTexture createBackendTexture(const SkPixmap srcData[],
520 int numLevels,
521 GrSurfaceOrigin,
522 GrRenderable,
523 GrProtected,
Adlai Holler98dd0042020-10-13 10:04:00 -0400524 GrGpuFinishedProc finishedProc = nullptr,
525 GrGpuFinishedContext finishedContext = nullptr);
526
Brian Salomonb5f880a2020-12-07 11:30:16 -0500527 /**
528 * Convenience version createBackendTexture() that takes just a base level pixmap.
529 */
Adlai Holler98dd0042020-10-13 10:04:00 -0400530 GrBackendTexture createBackendTexture(const SkPixmap& srcData,
Brian Salomonb5f880a2020-12-07 11:30:16 -0500531 GrSurfaceOrigin textureOrigin,
Adlai Holler98dd0042020-10-13 10:04:00 -0400532 GrRenderable renderable,
533 GrProtected isProtected,
534 GrGpuFinishedProc finishedProc = nullptr,
535 GrGpuFinishedContext finishedContext = nullptr) {
Brian Salomonb5f880a2020-12-07 11:30:16 -0500536 return this->createBackendTexture(&srcData, 1, textureOrigin, renderable, isProtected,
537 finishedProc, finishedContext);
Adlai Holler98dd0042020-10-13 10:04:00 -0400538 }
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400539
Brian Salomonb5f880a2020-12-07 11:30:16 -0500540 // Deprecated versions that do not take origin and assume top-left.
541 GrBackendTexture createBackendTexture(const SkPixmap srcData[],
542 int numLevels,
543 GrRenderable renderable,
544 GrProtected isProtected,
545 GrGpuFinishedProc finishedProc = nullptr,
546 GrGpuFinishedContext finishedContext = nullptr) {
547 return this->createBackendTexture(srcData,
548 numLevels,
549 kTopLeft_GrSurfaceOrigin,
550 renderable,
551 isProtected,
552 finishedProc,
553 finishedContext);
554 }
555 GrBackendTexture createBackendTexture(const SkPixmap& srcData,
556 GrRenderable renderable,
557 GrProtected isProtected,
558 GrGpuFinishedProc finishedProc = nullptr,
559 GrGpuFinishedContext finishedContext = nullptr) {
560 return this->createBackendTexture(&srcData,
561 1,
562 renderable,
563 isProtected,
564 finishedProc,
565 finishedContext);
566 }
567
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400568 /**
569 * If possible, updates a backend texture to be filled to a particular color. The client should
570 * check the return value to see if the update was successful. The client can pass in a
571 * finishedProc to be notified when the data has been uploaded by the gpu and the texture can be
572 * deleted. The client is required to call `submit` to send the upload work to the gpu.
573 * The finishedProc will always get called even if we failed to update the GrBackendTexture.
574 * For the Vulkan backend after a successful update the layout of the created VkImage will be:
575 * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
576 */
577 bool updateBackendTexture(const GrBackendTexture&,
578 const SkColor4f& color,
579 GrGpuFinishedProc finishedProc,
580 GrGpuFinishedContext finishedContext);
581
582 /**
583 * If possible, updates a backend texture to be filled to a particular color. The data in
584 * GrBackendTexture and passed in color is interpreted with respect to the passed in
585 * SkColorType. The client should check the return value to see if the update was successful.
586 * The client can pass in a finishedProc to be notified when the data has been uploaded by the
587 * gpu and the texture can be deleted. The client is required to call `submit` to send
588 * the upload work to the gpu. The finishedProc will always get called even if we failed to
589 * update the GrBackendTexture.
590 * For the Vulkan backend after a successful update the layout of the created VkImage will be:
591 * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
592 */
593 bool updateBackendTexture(const GrBackendTexture&,
594 SkColorType skColorType,
595 const SkColor4f& color,
596 GrGpuFinishedProc finishedProc,
597 GrGpuFinishedContext finishedContext);
598
599 /**
600 * If possible, updates a backend texture filled with the provided pixmap data. The client
601 * should check the return value to see if the update was successful. The client can pass in a
602 * finishedProc to be notified when the data has been uploaded by the gpu and the texture can be
603 * deleted. The client is required to call `submit` to send the upload work to the gpu.
604 * The finishedProc will always get called even if we failed to create the GrBackendTexture.
605 * The backend texture must be compatible with the provided pixmap(s). Compatible, in this case,
606 * means that the backend format is compatible with the base pixmap's colortype. The src data
607 * can be deleted when this call returns.
608 * If the backend texture is mip mapped, the data for all the mipmap levels must be provided.
609 * In the mipmapped case all the colortypes of the provided pixmaps must be the same.
610 * Additionally, all the miplevels must be sized correctly (please see
Brian Salomonb5f880a2020-12-07 11:30:16 -0500611 * SkMipmap::ComputeLevelSize and ComputeLevelCount). The GrSurfaceOrigin controls whether the
612 * pixmap data is vertically flipped in the texture.
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400613 * Note: the pixmap's alphatypes and colorspaces are ignored.
614 * For the Vulkan backend after a successful update the layout of the created VkImage will be:
615 * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
616 */
617 bool updateBackendTexture(const GrBackendTexture&,
618 const SkPixmap srcData[],
619 int numLevels,
Brian Salomonb5f880a2020-12-07 11:30:16 -0500620 GrSurfaceOrigin = kTopLeft_GrSurfaceOrigin,
621 GrGpuFinishedProc finishedProc = nullptr,
622 GrGpuFinishedContext finishedContext = nullptr);
623
624 /**
625 * Convenience version of updateBackendTexture that takes just a base level pixmap.
626 */
627 bool updateBackendTexture(const GrBackendTexture& texture,
628 const SkPixmap& srcData,
629 GrSurfaceOrigin textureOrigin = kTopLeft_GrSurfaceOrigin,
630 GrGpuFinishedProc finishedProc = nullptr,
631 GrGpuFinishedContext finishedContext = nullptr) {
632 return this->updateBackendTexture(texture,
633 &srcData,
634 1,
635 textureOrigin,
636 finishedProc,
637 finishedContext);
638 }
639
640 // Deprecated version that does not take origin and assumes top-left.
641 bool updateBackendTexture(const GrBackendTexture& texture,
642 const SkPixmap srcData[],
643 int numLevels,
644 GrGpuFinishedProc finishedProc,
645 GrGpuFinishedContext finishedContext) {
646 return this->updateBackendTexture(texture,
647 srcData,
648 numLevels,
649 kTopLeft_GrSurfaceOrigin,
650 finishedProc,
651 finishedContext);
652 }
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400653
    /**
     * Retrieve the GrBackendFormat for a given SkImage::CompressionType. This is
     * guaranteed to match the backend format used by the following
     * createCompressedBackendTexture methods that take a CompressionType.
     * (Inherited from GrRecordingContext and re-exposed here.)
     *
     * The caller should check that the returned format is valid.
     */
    using GrRecordingContext::compressedBackendFormat;
662
    /**
     * If possible, create a compressed backend texture initialized to a particular color. The
     * client should ensure that the returned backend texture is valid. The client can pass in a
     * finishedProc to be notified when the data has been uploaded by the gpu and the texture can be
     * deleted. The client is required to call `submit` to send the upload work to the gpu.
     * The finishedProc will always get called even if we failed to create the GrBackendTexture.
     * For the Vulkan backend the layout of the created VkImage will be:
     * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
     */
    GrBackendTexture createCompressedBackendTexture(int width, int height,
                                                    const GrBackendFormat&,
                                                    const SkColor4f& color,
                                                    GrMipmapped,
                                                    GrProtected = GrProtected::kNo,
                                                    GrGpuFinishedProc finishedProc = nullptr,
                                                    GrGpuFinishedContext finishedContext = nullptr);

    GrBackendTexture createCompressedBackendTexture(int width, int height,
                                                    SkImage::CompressionType,
                                                    const SkColor4f& color,
                                                    GrMipmapped,
                                                    GrProtected = GrProtected::kNo,
                                                    GrGpuFinishedProc finishedProc = nullptr,
                                                    GrGpuFinishedContext finishedContext = nullptr);
687
    /**
     * If possible, create a backend texture initialized with the provided raw data. The client
     * should ensure that the returned backend texture is valid. The client can pass in a
     * finishedProc to be notified when the data has been uploaded by the gpu and the texture can be
     * deleted. The client is required to call `submit` to send the upload work to the gpu.
     * The finishedProc will always get called even if we failed to create the GrBackendTexture.
     * If numLevels is 1 a non-mipMapped texture will result. If a mipMapped texture is desired
     * the data for all the mipmap levels must be provided. Additionally, all the miplevels
     * must be sized correctly (please see SkMipmap::ComputeLevelSize and ComputeLevelCount).
     * For the Vulkan backend the layout of the created VkImage will be:
     * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
     */
    GrBackendTexture createCompressedBackendTexture(int width, int height,
                                                    const GrBackendFormat&,
                                                    const void* data, size_t dataSize,
                                                    GrMipmapped,
                                                    GrProtected = GrProtected::kNo,
                                                    GrGpuFinishedProc finishedProc = nullptr,
                                                    GrGpuFinishedContext finishedContext = nullptr);

    GrBackendTexture createCompressedBackendTexture(int width, int height,
                                                    SkImage::CompressionType,
                                                    const void* data, size_t dataSize,
                                                    GrMipmapped,
                                                    GrProtected = GrProtected::kNo,
                                                    GrGpuFinishedProc finishedProc = nullptr,
                                                    GrGpuFinishedContext finishedContext = nullptr);
715
    /**
     * If possible, updates a backend texture filled with the provided color. If the texture is
     * mipmapped, all levels of the mip chain will be updated to have the supplied color. The client
     * should check the return value to see if the update was successful. The client can pass in a
     * finishedProc to be notified when the data has been uploaded by the gpu and the texture can be
     * deleted. The client is required to call `submit` to send the upload work to the gpu.
     * The finishedProc will always get called even if we failed to create the GrBackendTexture.
     * Note: unlike the createCompressedBackendTexture variants above, finishedProc and
     * finishedContext have no defaults here; pass nullptr if no notification is needed.
     * For the Vulkan backend after a successful update the layout of the created VkImage will be:
     * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
     */
    bool updateCompressedBackendTexture(const GrBackendTexture&,
                                        const SkColor4f& color,
                                        GrGpuFinishedProc finishedProc,
                                        GrGpuFinishedContext finishedContext);
730
    /**
     * If possible, updates a backend texture filled with the provided raw data. The client
     * should check the return value to see if the update was successful. The client can pass in a
     * finishedProc to be notified when the data has been uploaded by the gpu and the texture can be
     * deleted. The client is required to call `submit` to send the upload work to the gpu.
     * The finishedProc will always get called even if we failed to create the GrBackendTexture.
     * If a mipMapped texture is passed in, the data for all the mipmap levels must be provided.
     * Additionally, all the miplevels must be sized correctly (please see
     * SkMipmap::ComputeLevelSize and ComputeLevelCount).
     * For the Vulkan backend after a successful update the layout of the created VkImage will be:
     * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
     */
    bool updateCompressedBackendTexture(const GrBackendTexture&,
                                        const void* data,
                                        size_t dataSize,
                                        GrGpuFinishedProc finishedProc,
                                        GrGpuFinishedContext finishedContext);
748
    /**
     * Updates the state of the GrBackendTexture/RenderTarget to have the passed in
     * GrBackendSurfaceMutableState. All objects that wrap the backend surface (i.e. SkSurfaces and
     * SkImages) will also be aware of this state change. This call does not submit the state change
     * to the gpu, but requires the client to call `submit` to send it to the GPU. The work
     * for this call is ordered linearly with all other calls that require GrContext::submit to be
     * called (e.g. updateBackendTexture and flush). If finishedProc is not null then it will be
     * called with finishedContext after the state transition is known to have occurred on the GPU.
     *
     * See GrBackendSurfaceMutableState to see what state can be set via this call.
     *
     * If the backend API is Vulkan, the caller can set the GrBackendSurfaceMutableState's
     * VkImageLayout to VK_IMAGE_LAYOUT_UNDEFINED or queueFamilyIndex to VK_QUEUE_FAMILY_IGNORED to
     * tell Skia to not change those respective states.
     *
     * If previousState is not null and this returns true, then Skia will have filled in
     * previousState to have the values of the state before this call.
     */
    bool setBackendTextureState(const GrBackendTexture&,
                                const GrBackendSurfaceMutableState&,
                                GrBackendSurfaceMutableState* previousState = nullptr,
                                GrGpuFinishedProc finishedProc = nullptr,
                                GrGpuFinishedContext finishedContext = nullptr);
    bool setBackendRenderTargetState(const GrBackendRenderTarget&,
                                     const GrBackendSurfaceMutableState&,
                                     GrBackendSurfaceMutableState* previousState = nullptr,
                                     GrGpuFinishedProc finishedProc = nullptr,
                                     GrGpuFinishedContext finishedContext = nullptr);

    // Deletes the given backend texture. NOTE(review): presumably the client must ensure any
    // outstanding GPU work using this texture has completed first — confirm in GrDirectContext.cpp.
    void deleteBackendTexture(GrBackendTexture);
779
    // This interface allows clients to pre-compile shaders and populate the runtime program cache.
    // The key and data blobs should be the ones passed to the PersistentCache, in SkSL format.
    //
    // Steps to use this API:
    //
    // 1) Create a GrDirectContext as normal, but set fPersistentCache on GrContextOptions to
    //    something that will save the cached shader blobs. Set fShaderCacheStrategy to kSkSL. This
    //    will ensure that the blobs are SkSL, and are suitable for pre-compilation.
    //
    // 2) Run your application, and save all of the key/data pairs that are fed to the cache.
    //
    // 3) Switch over to shipping your application. Include the key/data pairs from above.
    //
    // 4) At startup (or any convenient time), call precompileShader for each key/data pair.
    //    This will compile the SkSL to create a GL program, and populate the runtime cache.
    //
    // This is only guaranteed to work if the context/device used in step #2 are created in the
    // same way as the one used in step #4, and the same GrContextOptions are specified.
    // Using cached shader blobs on a different device or driver is undefined.
    bool precompileShader(const SkData& key, const SkData& data);

#ifdef SK_ENABLE_DUMP_GPU
    /** Returns a string with detailed information about the context & GPU, in JSON format. */
    SkString dump() const;
#endif
803
Robert Phillipsedff4672021-03-11 09:16:25 -0500804 class DirectContextID {
805 public:
Robert Phillipse7a959d2021-03-11 14:44:42 -0500806 static GrDirectContext::DirectContextID Next();
Robert Phillipsedff4672021-03-11 09:16:25 -0500807
808 DirectContextID() : fID(SK_InvalidUniqueID) {}
809
810 bool operator==(const DirectContextID& that) const { return fID == that.fID; }
811 bool operator!=(const DirectContextID& that) const { return !(*this == that); }
812
813 void makeInvalid() { fID = SK_InvalidUniqueID; }
814 bool isValid() const { return fID != SK_InvalidUniqueID; }
815
816 private:
817 constexpr DirectContextID(uint32_t id) : fID(id) {}
818 uint32_t fID;
819 };
820
    // The identifier assigned to this context at construction (fDirectContextID is const).
    DirectContextID directContextID() const { return fDirectContextID; }

    // Provides access to functions that aren't part of the public API.
    GrDirectContextPriv priv();
    const GrDirectContextPriv priv() const;  // NOLINT(readability-const-return-type)
Adlai Holler53cf44c2020-10-13 17:40:21 -0400826
protected:
    // Protected: instances are presumably created via backend-specific factory functions
    // declared earlier in this class — not visible in this excerpt.
    GrDirectContext(GrBackendApi backend, const GrContextOptions& options);

    // Second-phase initialization; overrides the base-class (GrRecordingContext) hook.
    bool init() override;

    // Accessors for the atlas managers owned by this context.
    GrAtlasManager* onGetAtlasManager() { return fAtlasManager.get(); }
    GrSmallPathAtlasMgr* onGetSmallPathAtlasMgr();

    // A GrDirectContext is itself the direct context.
    GrDirectContext* asDirectContext() override { return this; }
836
private:
    // This call will make sure our work on the GPU is finished and will execute any outstanding
    // asynchronous work (e.g. calling finished procs, freeing resources, etc.) related to the
    // outstanding work on the gpu. The main use currently for this function is when tearing down or
    // abandoning the context.
    //
    // When we finish up work on the GPU it could trigger callbacks to the client. In the case we
    // are abandoning the context we don't want the client to be able to use the GrDirectContext to
    // issue more commands during the callback. Thus before calling this function we set the
    // GrDirectContext's state to be abandoned. However, we need to be able to get by the abandoned
    // check in the call to know that it is safe to execute this. The shouldExecuteWhileAbandoned
    // bool is used for this signal.
    void syncAllOutstandingGpuWork(bool shouldExecuteWhileAbandoned);
850
    // Unique ID of this context, assigned at construction and immutable thereafter.
    const DirectContextID fDirectContextID;
    // fTaskGroup must appear before anything that uses it (e.g. fGpu), so that it is destroyed
    // after all of its users. Clients of fTaskGroup will generally want to ensure that they call
    // wait() on it as they are being destroyed, to avoid the possibility of pending tasks being
    // invoked after objects they depend upon have already been destroyed.
    std::unique_ptr<SkTaskGroup> fTaskGroup;
    std::unique_ptr<GrStrikeCache> fStrikeCache;
    sk_sp<GrGpu> fGpu;
    std::unique_ptr<GrResourceCache> fResourceCache;
    std::unique_ptr<GrResourceProvider> fResourceProvider;

    // Whether the PM/UPM conversion test has been run — presumably set lazily; confirm in .cpp.
    bool fDidTestPMConversions;
    // true if the PM/UPM conversion succeeded; false otherwise
    bool fPMUPMConversionsRoundTrip;

    GrContextOptions::PersistentCache* fPersistentCache;

    std::unique_ptr<GrClientMappedBufferManager> fMappedBufferManager;
    std::unique_ptr<GrAtlasManager> fAtlasManager;

    std::unique_ptr<GrSmallPathAtlasMgr> fSmallPathAtlasMgr;

    // GrDirectContextPriv exposes the non-public API (see priv() above).
    friend class GrDirectContextPriv;

    using INHERITED = GrRecordingContext;
Robert Phillipsad248452020-06-30 09:27:52 -0400876};
877
878
879#endif