blob: 170b065a9a3c37b99ef3e3376e62e9913db37ea9 [file] [log] [blame]
Robert Phillipsad248452020-06-30 09:27:52 -04001/*
2 * Copyright 2020 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8#ifndef GrDirectContext_DEFINED
9#define GrDirectContext_DEFINED
10
Adlai Holler0ce2c542020-10-06 14:04:35 -040011#include "include/private/GrContext.h"
Robert Phillipsad248452020-06-30 09:27:52 -040012
Adlai Holler6d0745b2020-10-13 13:29:00 -040013#include "include/gpu/GrBackendSurface.h"
14
 15// We shouldn't need this but currently Android is relying on this being included transitively.
16#include "include/core/SkUnPreMultiply.h"
17
18class GrBackendSemaphore;
19class GrContextThreadSafeProxy;
20struct GrD3DBackendContext;
21class GrFragmentProcessor;
22struct GrGLInterface;
23struct GrMockOptions;
24class GrPath;
25class GrRenderTargetContext;
26class GrSurfaceProxy;
27class GrSwizzle;
28class GrTextureProxy;
29struct GrVkBackendContext;
30
31class SkImage;
32class SkString;
33class SkSurfaceCharacterization;
34class SkSurfaceProps;
35class SkTraceMemoryDump;
Robert Phillipsad248452020-06-30 09:27:52 -040036
Robert Phillipsc7228c62020-07-14 12:57:39 -040037class SK_API GrDirectContext : public GrContext {
Robert Phillipsad248452020-06-30 09:27:52 -040038public:
Robert Phillipsf4f80112020-07-13 16:13:31 -040039#ifdef SK_GL
40 /**
41 * Creates a GrDirectContext for a backend context. If no GrGLInterface is provided then the
42 * result of GrGLMakeNativeInterface() is used if it succeeds.
43 */
44 static sk_sp<GrDirectContext> MakeGL(sk_sp<const GrGLInterface>, const GrContextOptions&);
45 static sk_sp<GrDirectContext> MakeGL(sk_sp<const GrGLInterface>);
46 static sk_sp<GrDirectContext> MakeGL(const GrContextOptions&);
47 static sk_sp<GrDirectContext> MakeGL();
48#endif
49
50#ifdef SK_VULKAN
51 /**
52 * The Vulkan context (VkQueue, VkDevice, VkInstance) must be kept alive until the returned
53 * GrDirectContext is destroyed. This also means that any objects created with this
54 * GrDirectContext (e.g. SkSurfaces, SkImages, etc.) must also be released as they may hold
55 * refs on the GrDirectContext. Once all these objects and the GrDirectContext are released,
56 * then it is safe to delete the vulkan objects.
57 */
58 static sk_sp<GrDirectContext> MakeVulkan(const GrVkBackendContext&, const GrContextOptions&);
59 static sk_sp<GrDirectContext> MakeVulkan(const GrVkBackendContext&);
60#endif
61
62#ifdef SK_METAL
63 /**
64 * Makes a GrDirectContext which uses Metal as the backend. The device parameter is an
65 * MTLDevice and queue is an MTLCommandQueue which should be used by the backend. These objects
66 * must have a ref on them which can be transferred to Ganesh which will release the ref
67 * when the GrDirectContext is destroyed.
68 */
69 static sk_sp<GrDirectContext> MakeMetal(void* device, void* queue, const GrContextOptions&);
70 static sk_sp<GrDirectContext> MakeMetal(void* device, void* queue);
71#endif
72
73#ifdef SK_DIRECT3D
74 /**
75 * Makes a GrDirectContext which uses Direct3D as the backend. The Direct3D context
76 * must be kept alive until the returned GrDirectContext is first destroyed or abandoned.
77 */
78 static sk_sp<GrDirectContext> MakeDirect3D(const GrD3DBackendContext&, const GrContextOptions&);
79 static sk_sp<GrDirectContext> MakeDirect3D(const GrD3DBackendContext&);
80#endif
81
82#ifdef SK_DAWN
83 static sk_sp<GrDirectContext> MakeDawn(const wgpu::Device&,
84 const GrContextOptions&);
85 static sk_sp<GrDirectContext> MakeDawn(const wgpu::Device&);
86#endif
87
88 static sk_sp<GrDirectContext> MakeMock(const GrMockOptions*, const GrContextOptions&);
89 static sk_sp<GrDirectContext> MakeMock(const GrMockOptions*);
Robert Phillipsad248452020-06-30 09:27:52 -040090
91 ~GrDirectContext() override;
92
Adlai Hollera7a40442020-10-09 09:49:42 -040093 /**
94 * The context normally assumes that no outsider is setting state
95 * within the underlying 3D API's context/device/whatever. This call informs
96 * the context that the state was modified and it should resend. Shouldn't
97 * be called frequently for good performance.
98 * The flag bits, state, is dependent on which backend is used by the
99 * context, either GL or D3D (possible in future).
100 */
101 void resetContext(uint32_t state = kAll_GrBackendState);
102
103 /**
104 * If the backend is GrBackendApi::kOpenGL, then all texture unit/target combinations for which
105 * the context has modified the bound texture will have texture id 0 bound. This does not
106 * flush the context. Calling resetContext() does not change the set that will be bound
107 * to texture id 0 on the next call to resetGLTextureBindings(). After this is called
108 * all unit/target combinations are considered to have unmodified bindings until the context
109 * subsequently modifies them (meaning if this is called twice in a row with no intervening
110 * context usage then the second call is a no-op.)
111 */
112 void resetGLTextureBindings();
113
114 /**
115 * Abandons all GPU resources and assumes the underlying backend 3D API context is no longer
116 * usable. Call this if you have lost the associated GPU context, and thus internal texture,
117 * buffer, etc. references/IDs are now invalid. Calling this ensures that the destructors of the
Adlai Holler98dd0042020-10-13 10:04:00 -0400118 * context and any of its created resource objects will not make backend 3D API calls. Content
Adlai Hollera7a40442020-10-09 09:49:42 -0400119 * rendered but not previously flushed may be lost. After this function is called all subsequent
Adlai Holler98dd0042020-10-13 10:04:00 -0400120 * calls on the context will fail or be no-ops.
Adlai Hollera7a40442020-10-09 09:49:42 -0400121 *
122 * The typical use case for this function is that the underlying 3D context was lost and further
123 * API calls may crash.
124 *
125 * For Vulkan, even if the device becomes lost, the VkQueue, VkDevice, or VkInstance used to
126 * create the context must be kept alive even after abandoning the context. Those objects must
127 * live for the lifetime of the context object itself. The reason for this is so that
128 * we can continue to delete any outstanding GrBackendTextures/RenderTargets which must be
129 * cleaned up even in a device lost state.
130 */
Robert Phillipsad248452020-06-30 09:27:52 -0400131 void abandonContext() override;
132
Adlai Hollera7a40442020-10-09 09:49:42 -0400133 /**
 134 * Returns true if the context was abandoned or if the backend specific context has
 135 * gotten into an unrecoverable, lost state (e.g. in Vulkan backend if we've gotten a
Adlai Holler64e13832020-10-13 08:21:56 -0400136 * VK_ERROR_DEVICE_LOST). If the backend context is lost, this call will also abandon this
137 * context.
Adlai Hollera7a40442020-10-09 09:49:42 -0400138 */
139 bool abandoned() override;
140
Adlai Holler61a591c2020-10-12 12:38:33 -0400141 // TODO: Remove this from public after migrating Chrome.
142 sk_sp<GrContextThreadSafeProxy> threadSafeProxy();
143
144 /**
145 * Checks if the underlying 3D API reported an out-of-memory error. If this returns true it is
146 * reset and will return false until another out-of-memory error is reported by the 3D API. If
147 * the context is abandoned then this will report false.
148 *
149 * Currently this is implemented for:
150 *
151 * OpenGL [ES] - Note that client calls to glGetError() may swallow GL_OUT_OF_MEMORY errors and
152 * therefore hide the error from Skia. Also, it is not advised to use this in combination with
153 * enabling GrContextOptions::fSkipGLErrorChecks. That option may prevent the context from ever
154 * checking the GL context for OOM.
155 *
156 * Vulkan - Reports true if VK_ERROR_OUT_OF_HOST_MEMORY or VK_ERROR_OUT_OF_DEVICE_MEMORY has
157 * occurred.
158 */
159 bool oomed();
160
161 /**
162 * This is similar to abandonContext() however the underlying 3D context is not yet lost and
163 * the context will cleanup all allocated resources before returning. After returning it will
164 * assume that the underlying context may no longer be valid.
165 *
166 * The typical use case for this function is that the client is going to destroy the 3D context
167 * but can't guarantee that context will be destroyed first (perhaps because it may be ref'ed
168 * elsewhere by either the client or Skia objects).
169 *
170 * For Vulkan, even if the device becomes lost, the VkQueue, VkDevice, or VkInstance used to
171 * create the context must be alive before calling releaseResourcesAndAbandonContext.
172 */
173 void releaseResourcesAndAbandonContext();
Robert Phillipsad248452020-06-30 09:27:52 -0400174
Adlai Holler3a508e92020-10-12 13:58:01 -0400175 ///////////////////////////////////////////////////////////////////////////
176 // Resource Cache
177
178 /** DEPRECATED
179 * Return the current GPU resource cache limits.
180 *
181 * @param maxResources If non-null, will be set to -1.
182 * @param maxResourceBytes If non-null, returns maximum number of bytes of
183 * video memory that can be held in the cache.
184 */
185 void getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const;
186
187 /**
188 * Return the current GPU resource cache limit in bytes.
189 */
190 size_t getResourceCacheLimit() const;
191
192 /**
193 * Gets the current GPU resource cache usage.
194 *
195 * @param resourceCount If non-null, returns the number of resources that are held in the
196 * cache.
197 * @param maxResourceBytes If non-null, returns the total number of bytes of video memory held
198 * in the cache.
199 */
200 void getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const;
201
202 /**
203 * Gets the number of bytes in the cache consumed by purgeable (e.g. unlocked) resources.
204 */
205 size_t getResourceCachePurgeableBytes() const;
206
207 /** DEPRECATED
208 * Specify the GPU resource cache limits. If the current cache exceeds the maxResourceBytes
209 * limit, it will be purged (LRU) to keep the cache within the limit.
210 *
211 * @param maxResources Unused.
212 * @param maxResourceBytes The maximum number of bytes of video memory
213 * that can be held in the cache.
214 */
215 void setResourceCacheLimits(int maxResources, size_t maxResourceBytes);
216
217 /**
218 * Specify the GPU resource cache limit. If the cache currently exceeds this limit,
219 * it will be purged (LRU) to keep the cache within the limit.
220 *
221 * @param maxResourceBytes The maximum number of bytes of video memory
222 * that can be held in the cache.
223 */
224 void setResourceCacheLimit(size_t maxResourceBytes);
225
Adlai Holler4aa4c602020-10-12 13:58:52 -0400226 /**
 227 * Frees GPU resources created by the context. Can be called to reduce GPU memory
228 * pressure.
229 */
230 void freeGpuResources();
231
232 /**
233 * Purge GPU resources that haven't been used in the past 'msNotUsed' milliseconds or are
234 * otherwise marked for deletion, regardless of whether the context is under budget.
235 */
236 void performDeferredCleanup(std::chrono::milliseconds msNotUsed);
237
238 // Temporary compatibility API for Android.
 239 void purgeResourcesNotUsedInMs(std::chrono::milliseconds msNotUsed) {
 // Compatibility alias for Android (see note above); simply forwards to
 // performDeferredCleanup().
 240 this->performDeferredCleanup(msNotUsed);
 241 }
242
243 /**
 244 * Purge unlocked resources from the cache until the provided byte count has been reached
245 * or we have purged all unlocked resources. The default policy is to purge in LRU order, but
246 * can be overridden to prefer purging scratch resources (in LRU order) prior to purging other
247 * resource types.
248 *
249 * @param maxBytesToPurge the desired number of bytes to be purged.
250 * @param preferScratchResources If true scratch resources will be purged prior to other
251 * resource types.
252 */
253 void purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources);
254
255 /**
256 * This entry point is intended for instances where an app has been backgrounded or
257 * suspended.
258 * If 'scratchResourcesOnly' is true all unlocked scratch resources will be purged but the
259 * unlocked resources with persistent data will remain. If 'scratchResourcesOnly' is false
260 * then all unlocked resources will be purged.
261 * In either case, after the unlocked resources are purged a separate pass will be made to
262 * ensure that resource usage is under budget (i.e., even if 'scratchResourcesOnly' is true
263 * some resources with persistent data may be purged to be under budget).
264 *
265 * @param scratchResourcesOnly If true only unlocked scratch resources will be purged prior
266 * enforcing the budget requirements.
267 */
268 void purgeUnlockedResources(bool scratchResourcesOnly);
269
270 /**
271 * Gets the maximum supported texture size.
272 */
273 using GrRecordingContext::maxTextureSize;
274
275 /**
276 * Gets the maximum supported render target size.
277 */
278 using GrRecordingContext::maxRenderTargetSize;
279
280 /**
281 * Can a SkImage be created with the given color type.
282 */
283 using GrRecordingContext::colorTypeSupportedAsImage;
284
285 /**
286 * Can a SkSurface be created with the given color type. To check whether MSAA is supported
287 * use maxSurfaceSampleCountForColorType().
288 */
289 using GrRecordingContext::colorTypeSupportedAsSurface;
290
291 /**
292 * Gets the maximum supported sample count for a color type. 1 is returned if only non-MSAA
293 * rendering is supported for the color type. 0 is returned if rendering to this color type
294 * is not supported at all.
295 */
296 using GrRecordingContext::maxSurfaceSampleCountForColorType;
Robert Phillipsad248452020-06-30 09:27:52 -0400297
Adlai Holler3acc69a2020-10-13 08:20:51 -0400298 ///////////////////////////////////////////////////////////////////////////
299 // Misc.
300
301 /**
302 * Inserts a list of GPU semaphores that the current GPU-backed API must wait on before
303 * executing any more commands on the GPU. If this call returns false, then the GPU back-end
304 * will not wait on any passed in semaphores, and the client will still own the semaphores,
305 * regardless of the value of deleteSemaphoresAfterWait.
306 *
307 * If deleteSemaphoresAfterWait is false then Skia will not delete the semaphores. In this case
308 * it is the client's responsibility to not destroy or attempt to reuse the semaphores until it
309 * knows that Skia has finished waiting on them. This can be done by using finishedProcs on
310 * flush calls.
311 */
312 bool wait(int numSemaphores, const GrBackendSemaphore* waitSemaphores,
313 bool deleteSemaphoresAfterWait = true);
314
315 /**
316 * Call to ensure all drawing to the context has been flushed and submitted to the underlying 3D
317 * API. This is equivalent to calling GrContext::flush with a default GrFlushInfo followed by
318 * GrContext::submit(syncCpu).
319 */
 320 void flushAndSubmit(bool syncCpu = false) {
 // Flush all pending work with a default GrFlushInfo, then submit it to the
 // GPU. When syncCpu is true, submit() returns only once the GPU has
 // finished all submitted work.
 321 this->flush(GrFlushInfo());
 322 this->submit(syncCpu);
 323 }
324
325 /**
326 * Call to ensure all drawing to the context has been flushed to underlying 3D API specific
327 * objects. A call to `submit` is always required to ensure work is actually sent to
328 * the gpu. Some specific API details:
329 * GL: Commands are actually sent to the driver, but glFlush is never called. Thus some
330 * sync objects from the flush will not be valid until a submission occurs.
331 *
332 * Vulkan/Metal/D3D/Dawn: Commands are recorded to the backend APIs corresponding command
333 * buffer or encoder objects. However, these objects are not sent to the gpu until a
334 * submission occurs.
335 *
336 * If the return is GrSemaphoresSubmitted::kYes, only initialized GrBackendSemaphores will be
337 * submitted to the gpu during the next submit call (it is possible Skia failed to create a
338 * subset of the semaphores). The client should not wait on these semaphores until after submit
339 * has been called, and must keep them alive until then. If this call returns
340 * GrSemaphoresSubmitted::kNo, the GPU backend will not submit any semaphores to be signaled on
341 * the GPU. Thus the client should not have the GPU wait on any of the semaphores passed in with
342 * the GrFlushInfo. Regardless of whether semaphores were submitted to the GPU or not, the
343 * client is still responsible for deleting any initialized semaphores.
 344 * Regardless of semaphore submission the context will still be flushed. It should be
345 * emphasized that a return value of GrSemaphoresSubmitted::kNo does not mean the flush did not
346 * happen. It simply means there were no semaphores submitted to the GPU. A caller should only
347 * take this as a failure if they passed in semaphores to be submitted.
348 */
349 GrSemaphoresSubmitted flush(const GrFlushInfo& info);
350
 /** Convenience overload: flushes with a default-constructed GrFlushInfo. */
 351 void flush() { this->flush({}); }
352
353 /**
354 * Submit outstanding work to the gpu from all previously un-submitted flushes. The return
355 * value of the submit will indicate whether or not the submission to the GPU was successful.
356 *
357 * If the call returns true, all previously passed in semaphores in flush calls will have been
358 * submitted to the GPU and they can safely be waited on. The caller should wait on those
359 * semaphores or perform some other global synchronization before deleting the semaphores.
360 *
361 * If it returns false, then those same semaphores will not have been submitted and we will not
362 * try to submit them again. The caller is free to delete the semaphores at any time.
363 *
364 * If the syncCpu flag is true this function will return once the gpu has finished with all
365 * submitted work.
366 */
367 bool submit(bool syncCpu = false);
368
369 /**
370 * Checks whether any asynchronous work is complete and if so calls related callbacks.
371 */
372 void checkAsyncWorkCompletion();
373
374 /** Enumerates all cached GPU resources and dumps their memory to traceMemoryDump. */
375 // Chrome is using this!
376 void dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const;
377
378 bool supportsDistanceFieldText() const;
379
380 void storeVkPipelineCacheData();
381
 382 // Returns the gpu memory size of the texture that backs the passed in SkImage. Returns 0 if
383 // the SkImage is not texture backed. For external format textures this will also return 0 as we
384 // cannot determine the correct size.
385 static size_t ComputeImageSize(sk_sp<SkImage> image, GrMipmapped, bool useNextPow2 = false);
386
387 /**
388 * Retrieve the default GrBackendFormat for a given SkColorType and renderability.
389 * It is guaranteed that this backend format will be the one used by the following
390 * SkColorType and SkSurfaceCharacterization-based createBackendTexture methods.
391 *
392 * The caller should check that the returned format is valid.
393 */
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400394 using GrRecordingContext::defaultBackendFormat;
Adlai Holler98dd0042020-10-13 10:04:00 -0400395
396 /**
397 * The explicitly allocated backend texture API allows clients to use Skia to create backend
398 * objects outside of Skia proper (i.e., Skia's caching system will not know about them.)
399 *
400 * It is the client's responsibility to delete all these objects (using deleteBackendTexture)
401 * before deleting the context used to create them. If the backend is Vulkan, the textures must
402 * be deleted before abandoning the context as well. Additionally, clients should only delete
403 * these objects on the thread for which that context is active.
404 *
405 * The client is responsible for ensuring synchronization between different uses
406 * of the backend object (i.e., wrapping it in a surface, rendering to it, deleting the
407 * surface, rewrapping it in a image and drawing the image will require explicit
408 * synchronization on the client's part).
409 */
410
411 /**
412 * If possible, create an uninitialized backend texture. The client should ensure that the
413 * returned backend texture is valid.
414 * For the Vulkan backend the layout of the created VkImage will be:
415 * VK_IMAGE_LAYOUT_UNDEFINED.
416 */
417 GrBackendTexture createBackendTexture(int width, int height,
418 const GrBackendFormat&,
419 GrMipmapped,
420 GrRenderable,
421 GrProtected = GrProtected::kNo);
422
423 /**
424 * If possible, create an uninitialized backend texture. The client should ensure that the
425 * returned backend texture is valid.
426 * If successful, the created backend texture will be compatible with the provided
427 * SkColorType.
428 * For the Vulkan backend the layout of the created VkImage will be:
429 * VK_IMAGE_LAYOUT_UNDEFINED.
430 */
431 GrBackendTexture createBackendTexture(int width, int height,
432 SkColorType,
433 GrMipmapped,
434 GrRenderable,
435 GrProtected = GrProtected::kNo);
436
437 /**
438 * If possible, create a backend texture initialized to a particular color. The client should
439 * ensure that the returned backend texture is valid. The client can pass in a finishedProc
440 * to be notified when the data has been uploaded by the gpu and the texture can be deleted. The
441 * client is required to call `submit` to send the upload work to the gpu. The
442 * finishedProc will always get called even if we failed to create the GrBackendTexture.
443 * For the Vulkan backend the layout of the created VkImage will be:
444 * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
445 */
446 GrBackendTexture createBackendTexture(int width, int height,
447 const GrBackendFormat&,
448 const SkColor4f& color,
449 GrMipmapped,
450 GrRenderable,
451 GrProtected = GrProtected::kNo,
452 GrGpuFinishedProc finishedProc = nullptr,
453 GrGpuFinishedContext finishedContext = nullptr);
454
455 /**
456 * If possible, create a backend texture initialized to a particular color. The client should
457 * ensure that the returned backend texture is valid. The client can pass in a finishedProc
458 * to be notified when the data has been uploaded by the gpu and the texture can be deleted. The
459 * client is required to call `submit` to send the upload work to the gpu. The
460 * finishedProc will always get called even if we failed to create the GrBackendTexture.
461 * If successful, the created backend texture will be compatible with the provided
462 * SkColorType.
463 * For the Vulkan backend the layout of the created VkImage will be:
464 * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
465 */
466 GrBackendTexture createBackendTexture(int width, int height,
467 SkColorType,
468 const SkColor4f& color,
469 GrMipmapped,
470 GrRenderable,
471 GrProtected = GrProtected::kNo,
472 GrGpuFinishedProc finishedProc = nullptr,
473 GrGpuFinishedContext finishedContext = nullptr);
474
475 /**
476 * If possible, create a backend texture initialized with the provided pixmap data. The client
477 * should ensure that the returned backend texture is valid. The client can pass in a
478 * finishedProc to be notified when the data has been uploaded by the gpu and the texture can be
479 * deleted. The client is required to call `submit` to send the upload work to the gpu.
480 * The finishedProc will always get called even if we failed to create the GrBackendTexture.
481 * If successful, the created backend texture will be compatible with the provided
482 * pixmap(s). Compatible, in this case, means that the backend format will be the result
483 * of calling defaultBackendFormat on the base pixmap's colortype. The src data can be deleted
484 * when this call returns.
485 * If numLevels is 1 a non-mipMapped texture will result. If a mipMapped texture is desired
486 * the data for all the mipmap levels must be provided. In the mipmapped case all the
487 * colortypes of the provided pixmaps must be the same. Additionally, all the miplevels
488 * must be sized correctly (please see SkMipmap::ComputeLevelSize and ComputeLevelCount).
489 * Note: the pixmap's alphatypes and colorspaces are ignored.
490 * For the Vulkan backend the layout of the created VkImage will be:
491 * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
492 */
493 GrBackendTexture createBackendTexture(const SkPixmap srcData[], int numLevels,
494 GrRenderable, GrProtected,
495 GrGpuFinishedProc finishedProc = nullptr,
496 GrGpuFinishedContext finishedContext = nullptr);
497
498 // Helper version of above for a single level.
 499 GrBackendTexture createBackendTexture(const SkPixmap& srcData,
 500 GrRenderable renderable,
 501 GrProtected isProtected,
 502 GrGpuFinishedProc finishedProc = nullptr,
 503 GrGpuFinishedContext finishedContext = nullptr) {
 // Forwards to the multi-level overload above with numLevels == 1
 // (i.e., a non-mipmapped texture).
 504 return this->createBackendTexture(&srcData, 1, renderable, isProtected, finishedProc,
 505 finishedContext);
 506 }
Adlai Holler2e0c70d2020-10-13 08:21:37 -0400507
508 /**
509 * If possible, updates a backend texture to be filled to a particular color. The client should
510 * check the return value to see if the update was successful. The client can pass in a
511 * finishedProc to be notified when the data has been uploaded by the gpu and the texture can be
512 * deleted. The client is required to call `submit` to send the upload work to the gpu.
513 * The finishedProc will always get called even if we failed to update the GrBackendTexture.
514 * For the Vulkan backend after a successful update the layout of the created VkImage will be:
515 * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
516 */
517 bool updateBackendTexture(const GrBackendTexture&,
518 const SkColor4f& color,
519 GrGpuFinishedProc finishedProc,
520 GrGpuFinishedContext finishedContext);
521
522 /**
523 * If possible, updates a backend texture to be filled to a particular color. The data in
524 * GrBackendTexture and passed in color is interpreted with respect to the passed in
525 * SkColorType. The client should check the return value to see if the update was successful.
526 * The client can pass in a finishedProc to be notified when the data has been uploaded by the
527 * gpu and the texture can be deleted. The client is required to call `submit` to send
528 * the upload work to the gpu. The finishedProc will always get called even if we failed to
529 * update the GrBackendTexture.
530 * For the Vulkan backend after a successful update the layout of the created VkImage will be:
531 * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
532 */
533 bool updateBackendTexture(const GrBackendTexture&,
534 SkColorType skColorType,
535 const SkColor4f& color,
536 GrGpuFinishedProc finishedProc,
537 GrGpuFinishedContext finishedContext);
538
539 /**
540 * If possible, updates a backend texture filled with the provided pixmap data. The client
541 * should check the return value to see if the update was successful. The client can pass in a
542 * finishedProc to be notified when the data has been uploaded by the gpu and the texture can be
543 * deleted. The client is required to call `submit` to send the upload work to the gpu.
544 * The finishedProc will always get called even if we failed to create the GrBackendTexture.
545 * The backend texture must be compatible with the provided pixmap(s). Compatible, in this case,
546 * means that the backend format is compatible with the base pixmap's colortype. The src data
547 * can be deleted when this call returns.
548 * If the backend texture is mip mapped, the data for all the mipmap levels must be provided.
549 * In the mipmapped case all the colortypes of the provided pixmaps must be the same.
550 * Additionally, all the miplevels must be sized correctly (please see
551 * SkMipmap::ComputeLevelSize and ComputeLevelCount).
552 * Note: the pixmap's alphatypes and colorspaces are ignored.
553 * For the Vulkan backend after a successful update the layout of the created VkImage will be:
554 * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
555 */
556 bool updateBackendTexture(const GrBackendTexture&,
557 const SkPixmap srcData[],
558 int numLevels,
559 GrGpuFinishedProc finishedProc,
560 GrGpuFinishedContext finishedContext);
561
562 /**
563 * Retrieve the GrBackendFormat for a given SkImage::CompressionType. This is
564 * guaranteed to match the backend format used by the following
565 * createCompressedBackendTexture methods that take a CompressionType.
566 * The caller should check that the returned format is valid.
567 */
568 using GrRecordingContext::compressedBackendFormat;
569
Adlai Holler64e13832020-10-13 08:21:56 -0400570 /**
 571 * If possible, create a compressed backend texture initialized to a particular color. The
572 * client should ensure that the returned backend texture is valid. The client can pass in a
573 * finishedProc to be notified when the data has been uploaded by the gpu and the texture can be
574 * deleted. The client is required to call `submit` to send the upload work to the gpu.
575 * The finishedProc will always get called even if we failed to create the GrBackendTexture.
576 * For the Vulkan backend the layout of the created VkImage will be:
577 * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
578 */
579 GrBackendTexture createCompressedBackendTexture(int width, int height,
580 const GrBackendFormat&,
581 const SkColor4f& color,
582 GrMipmapped,
583 GrProtected = GrProtected::kNo,
584 GrGpuFinishedProc finishedProc = nullptr,
585 GrGpuFinishedContext finishedContext = nullptr);
586
587 GrBackendTexture createCompressedBackendTexture(int width, int height,
588 SkImage::CompressionType,
589 const SkColor4f& color,
590 GrMipmapped,
591 GrProtected = GrProtected::kNo,
592 GrGpuFinishedProc finishedProc = nullptr,
593 GrGpuFinishedContext finishedContext = nullptr);
594
595 /**
596 * If possible, create a backend texture initialized with the provided raw data. The client
597 * should ensure that the returned backend texture is valid. The client can pass in a
598 * finishedProc to be notified when the data has been uploaded by the gpu and the texture can be
599 * deleted. The client is required to call `submit` to send the upload work to the gpu.
600 * The finishedProc will always get called even if we failed to create the GrBackendTexture
601 * If numLevels is 1 a non-mipMapped texture will result. If a mipMapped texture is desired
602 * the data for all the mipmap levels must be provided. Additionally, all the miplevels
603 * must be sized correctly (please see SkMipmap::ComputeLevelSize and ComputeLevelCount).
604 * For the Vulkan backend the layout of the created VkImage will be:
605 * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
606 */
607 GrBackendTexture createCompressedBackendTexture(int width, int height,
608 const GrBackendFormat&,
609 const void* data, size_t dataSize,
610 GrMipmapped,
611 GrProtected = GrProtected::kNo,
612 GrGpuFinishedProc finishedProc = nullptr,
613 GrGpuFinishedContext finishedContext = nullptr);
614
615 GrBackendTexture createCompressedBackendTexture(int width, int height,
616 SkImage::CompressionType,
617 const void* data, size_t dataSize,
618 GrMipmapped,
619 GrProtected = GrProtected::kNo,
620 GrGpuFinishedProc finishedProc = nullptr,
621 GrGpuFinishedContext finishedContext = nullptr);
622
623 /**
624 * If possible, updates a backend texture filled with the provided color. If the texture is
625 * mipmapped, all levels of the mip chain will be updated to have the supplied color. The client
626 * should check the return value to see if the update was successful. The client can pass in a
627 * finishedProc to be notified when the data has been uploaded by the gpu and the texture can be
628 * deleted. The client is required to call `submit` to send the upload work to the gpu.
629 * The finishedProc will always get called even if we failed to create the GrBackendTexture.
630 * For the Vulkan backend after a successful update the layout of the created VkImage will be:
631 * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
632 */
633 bool updateCompressedBackendTexture(const GrBackendTexture&,
634 const SkColor4f& color,
635 GrGpuFinishedProc finishedProc,
636 GrGpuFinishedContext finishedContext);
637
    /**
     * If possible, updates a backend texture filled with the provided raw data. The client
     * should check the return value to see if the update was successful. The client can pass in a
     * finishedProc to be notified when the data has been uploaded by the gpu and the texture can be
     * deleted. The client is required to call `submit` to send the upload work to the gpu.
     * The finishedProc will always get called even if we failed to update the GrBackendTexture.
     * If a mipMapped texture is passed in, the data for all the mipmap levels must be provided.
     * Additionally, all the miplevels must be sized correctly (please see
     * SkMipmap::ComputeLevelSize and ComputeLevelCount).
     * For the Vulkan backend after a successful update the layout of the created VkImage will be:
     * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
     */
    bool updateCompressedBackendTexture(const GrBackendTexture&,
                                        const void* data,
                                        size_t dataSize,
                                        GrGpuFinishedProc finishedProc,
                                        GrGpuFinishedContext finishedContext);
655
    /**
     * Updates the state of the GrBackendTexture/RenderTarget to have the passed in
     * GrBackendSurfaceMutableState. All objects that wrap the backend surface (i.e. SkSurfaces and
     * SkImages) will also be aware of this state change. This call does not submit the state change
     * to the gpu, but requires the client to call `submit` to send it to the GPU. The work
     * for this call is ordered linearly with all other calls that require GrContext::submit to be
     * called (e.g updateBackendTexture and flush). If finishedProc is not null then it will be
     * called with finishedContext after the state transition is known to have occurred on the GPU.
     *
     * See GrBackendSurfaceMutableState to see what state can be set via this call.
     *
     * If the backend API is Vulkan, the caller can set the GrBackendSurfaceMutableState's
     * VkImageLayout to VK_IMAGE_LAYOUT_UNDEFINED or queueFamilyIndex to VK_QUEUE_FAMILY_IGNORED to
     * tell Skia to not change those respective states.
     *
     * If previousState is not null and this returns true, then Skia will have filled in
     * previousState to have the values of the state before this call.
     */
    bool setBackendTextureState(const GrBackendTexture&,
                                const GrBackendSurfaceMutableState&,
                                GrBackendSurfaceMutableState* previousState = nullptr,
                                GrGpuFinishedProc finishedProc = nullptr,
                                GrGpuFinishedContext finishedContext = nullptr);

    /** As above, but operates on a GrBackendRenderTarget rather than a GrBackendTexture. */
    bool setBackendRenderTargetState(const GrBackendRenderTarget&,
                                     const GrBackendSurfaceMutableState&,
                                     GrBackendSurfaceMutableState* previousState = nullptr,
                                     GrGpuFinishedProc finishedProc = nullptr,
                                     GrGpuFinishedContext finishedContext = nullptr);
684
685 void deleteBackendTexture(GrBackendTexture);
686
    // This interface allows clients to pre-compile shaders and populate the runtime program cache.
    // The key and data blobs should be the ones passed to the PersistentCache, in SkSL format.
    //
    // Steps to use this API:
    //
    // 1) Create a GrContext as normal, but set fPersistentCache on GrContextOptions to something
    //    that will save the cached shader blobs. Set fShaderCacheStrategy to kSkSL. This will
    //    ensure that the blobs are SkSL, and are suitable for pre-compilation.
    //
    // 2) Run your application, and save all of the key/data pairs that are fed to the cache.
    //
    // 3) Switch over to shipping your application. Include the key/data pairs from above.
    //
    // 4) At startup (or any convenient time), call precompileShader for each key/data pair.
    //    This will compile the SkSL to create a GL program, and populate the runtime cache.
    //
    // This is only guaranteed to work if the context/device used in step #2 are created in the
    // same way as the one used in step #4, and the same GrContextOptions are specified.
    // Using cached shader blobs on a different device or driver is undefined.
    bool precompileShader(const SkData& key, const SkData& data);

#ifdef SK_ENABLE_DUMP_GPU
    /** Returns a string with detailed information about the context & GPU, in JSON format. */
    SkString dump() const;
#endif
710
protected:
    // Constructs a direct context for the given backend API. init() must succeed before the
    // context is usable.
    GrDirectContext(GrBackendApi backend, const GrContextOptions& options);

    bool init() override;

    // Returns a non-owning pointer; the atlas manager is owned by fAtlasManager below.
    GrAtlasManager* onGetAtlasManager() override { return fAtlasManager.get(); }
    // NOTE(review): presumably creates fSmallPathAtlasMgr on first use — creation is not visible
    // in this header; confirm in the .cpp.
    GrSmallPathAtlasMgr* onGetSmallPathAtlasMgr() override;

    // Unlike the base class, a GrDirectContext can vend itself as a direct context.
    GrDirectContext* asDirectContext() override { return this; }
720
private:
    // Owning pointer to the glyph/bitmap atlas manager, exposed via onGetAtlasManager().
    std::unique_ptr<GrAtlasManager> fAtlasManager;

    // Owning pointer exposed via onGetSmallPathAtlasMgr().
    std::unique_ptr<GrSmallPathAtlasMgr> fSmallPathAtlasMgr;

    using INHERITED = GrContext;
};


#endif