blob: ac65748dfa4b61e394424bebcea1c543fce668e3 [file] [log] [blame]
/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
#ifndef GrDirectContextPriv_DEFINED
#define GrDirectContextPriv_DEFINED
robertphillips4fd74ae2016-08-03 14:26:53 -070010
#include "include/core/SkSpan.h"
#include "include/core/SkSurface.h"
#include "include/gpu/GrDirectContext.h"
#include "src/gpu/BaseDevice.h"
robertphillips4fd74ae2016-08-03 14:26:53 -070015
class GrAtlasManager;
class GrBackendFormat;
class GrBackendRenderTarget;
class GrMemoryPool;
class GrOnFlushCallbackObject;
class GrRenderTargetProxy;
class GrSemaphore;
class GrSurfaceProxy;

class SkDeferredDisplayList;
class SkTaskGroup;
Robert Phillips62000362018-02-01 09:10:04 -050027
/** Class that adds methods to GrDirectContext that are only intended for use internal to Skia.
    This class is purely a privileged window into GrDirectContext. It should never have additional
    data members or virtual methods. */
Adlai Hollera0693042020-10-14 11:23:11 -040031class GrDirectContextPriv {
robertphillips4fd74ae2016-08-03 14:26:53 -070032public:
Robert Phillips4217ea72019-01-30 13:08:28 -050033
34 // from GrContext_Base
Robert Phillipsfd0d9702019-02-01 10:19:42 -050035 uint32_t contextID() const { return fContext->contextID(); }
Robert Phillips4217ea72019-01-30 13:08:28 -050036
Robert Phillipsfe0963c2019-02-07 13:25:07 -050037 bool matches(GrContext_Base* candidate) const { return fContext->matches(candidate); }
38
Robert Phillipsc1541ae2019-02-04 12:05:37 -050039 const GrContextOptions& options() const { return fContext->options(); }
40
Robert Phillipsbb606772019-02-04 17:50:57 -050041 const GrCaps* caps() const { return fContext->caps(); }
Robert Phillipsa41c6852019-02-07 10:44:10 -050042 sk_sp<const GrCaps> refCaps() const;
Robert Phillipsbb606772019-02-04 17:50:57 -050043
Robert Phillipsa41c6852019-02-07 10:44:10 -050044 GrImageContext* asImageContext() { return fContext->asImageContext(); }
45 GrRecordingContext* asRecordingContext() { return fContext->asRecordingContext(); }
Robert Phillipsa41c6852019-02-07 10:44:10 -050046
Adlai Holler17776e32021-02-18 13:09:36 -050047 // from GrRecordingContext
Robert Phillipsa41c6852019-02-07 10:44:10 -050048 GrProxyProvider* proxyProvider() { return fContext->proxyProvider(); }
49 const GrProxyProvider* proxyProvider() const { return fContext->proxyProvider(); }
50
51 /** This is only useful for debug purposes */
52 SkDEBUGCODE(GrSingleOwner* singleOwner() const { return fContext->singleOwner(); } )
Robert Phillips4217ea72019-01-30 13:08:28 -050053
54 // from GrRecordingContext
Robert Phillips6f0e02f2019-02-13 11:02:28 -050055 GrDrawingManager* drawingManager() { return fContext->drawingManager(); }
Michael Ludwig28b0c5d2019-12-19 14:51:00 -050056
Michael Ludwig2c316bd2019-12-19 14:50:44 -050057 SkArenaAlloc* recordTimeAllocator() { return fContext->arenas().recordTimeAllocator(); }
Michael Ludwig28b0c5d2019-12-19 14:51:00 -050058 GrRecordingContext::Arenas arenas() { return fContext->arenas(); }
Robert Phillipsd6841482019-02-08 10:29:20 -050059
Robert Phillips4d932d12020-04-09 08:58:52 -040060 GrStrikeCache* getGrStrikeCache() { return fContext->fStrikeCache.get(); }
Robert Phillips2184fb72019-02-21 16:11:41 -050061 GrTextBlobCache* getTextBlobCache() { return fContext->getTextBlobCache(); }
62
Robert Phillipsd464feb2020-10-08 11:00:02 -040063 GrThreadSafeCache* threadSafeCache() { return fContext->threadSafeCache(); }
Robert Phillips12d06a32020-09-16 12:31:34 -040064
Robert Phillipsc5058a62019-02-15 12:52:59 -050065 /**
66 * Registers an object for flush-related callbacks. (See GrOnFlushCallbackObject.)
67 *
68 * NOTE: the drawing manager tracks this object as a raw pointer; it is up to the caller to
69 * ensure its lifetime is tied to that of the context.
70 */
71 void addOnFlushCallbackObject(GrOnFlushCallbackObject*);
72
Robert Phillipsd6841482019-02-08 10:29:20 -050073 GrAuditTrail* auditTrail() { return fContext->auditTrail(); }
Robert Phillips4217ea72019-01-30 13:08:28 -050074
Robert Phillipse42edcc2017-12-13 11:50:22 -050075 /**
Brian Salomonf9a1fdf2019-05-09 10:30:12 -040076 * Finalizes all pending reads and writes to the surfaces and also performs an MSAA resolves
77 * if necessary. The GrSurfaceProxy array is treated as a hint. If it is supplied the context
78 * will guarantee that the draws required for those proxies are flushed but it could do more.
79 * If no array is provided then all current work will be flushed.
Robert Phillips7ee385e2017-03-30 08:02:11 -040080 *
81 * It is not necessary to call this before reading the render target via Skia/GrContext.
82 * GrContext will detect when it must perform a resolve before reading pixels back from the
83 * surface or using it as a texture.
84 */
Robert Phillips80bfda82020-11-12 09:23:36 -050085 GrSemaphoresSubmitted flushSurfaces(
86 SkSpan<GrSurfaceProxy*>,
87 SkSurface::BackendSurfaceAccess = SkSurface::BackendSurfaceAccess::kNoAccess,
88 const GrFlushInfo& = {},
89 const GrBackendSurfaceMutableState* newState = nullptr);
Brian Salomonf9a1fdf2019-05-09 10:30:12 -040090
Robert Phillips80bfda82020-11-12 09:23:36 -050091 /** Version of above that flushes for a single proxy. Null is allowed. */
92 GrSemaphoresSubmitted flushSurface(
93 GrSurfaceProxy* proxy,
94 SkSurface::BackendSurfaceAccess access = SkSurface::BackendSurfaceAccess::kNoAccess,
95 const GrFlushInfo& info = {},
96 const GrBackendSurfaceMutableState* newState = nullptr) {
97 size_t size = proxy ? 1 : 0;
98 return this->flushSurfaces({&proxy, size}, access, info, newState);
99 }
Robert Phillips7ee385e2017-03-30 08:02:11 -0400100
Greg Daniel6eb8c242019-06-05 10:22:24 -0400101 /**
102 * Returns true if createPMToUPMEffect and createUPMToPMEffect will succeed. In other words,
103 * did we find a pair of round-trip preserving conversion effects?
104 */
105 bool validPMUPMConversionExists();
Robert Phillipse78b7252017-04-06 07:59:41 -0400106
107 /**
Brian Osman4c886ee2021-07-07 13:34:50 -0400108 * These functions create premul <-> unpremul effects, using specialized round-trip effects.
Robert Phillipse78b7252017-04-06 07:59:41 -0400109 */
Greg Daniel6eb8c242019-06-05 10:22:24 -0400110 std::unique_ptr<GrFragmentProcessor> createPMToUPMEffect(std::unique_ptr<GrFragmentProcessor>);
111 std::unique_ptr<GrFragmentProcessor> createUPMToPMEffect(std::unique_ptr<GrFragmentProcessor>);
Robert Phillipse78b7252017-04-06 07:59:41 -0400112
Brian Osman51279982017-08-23 10:12:00 -0400113 SkTaskGroup* getTaskGroup() { return fContext->fTaskGroup.get(); }
114
Adlai Holler9555f292020-10-09 09:41:14 -0400115 GrResourceProvider* resourceProvider() { return fContext->fResourceProvider.get(); }
116 const GrResourceProvider* resourceProvider() const { return fContext->fResourceProvider.get(); }
Robert Phillips6be756b2018-01-16 15:07:54 -0500117
Adlai Holler9555f292020-10-09 09:41:14 -0400118 GrResourceCache* getResourceCache() { return fContext->fResourceCache.get(); }
Robert Phillips6be756b2018-01-16 15:07:54 -0500119
Robert Phillipsf35fd8d2018-01-22 10:48:15 -0500120 GrGpu* getGpu() { return fContext->fGpu.get(); }
121 const GrGpu* getGpu() const { return fContext->fGpu.get(); }
122
Robert Phillipsc4039ea2018-03-01 11:36:45 -0500123 // This accessor should only ever be called by the GrOpFlushState.
Robert Phillips5a66efb2018-03-07 15:13:18 -0500124 GrAtlasManager* getAtlasManager() {
125 return fContext->onGetAtlasManager();
Robert Phillipsc4039ea2018-03-01 11:36:45 -0500126 }
Robert Phillipsf35fd8d2018-01-22 10:48:15 -0500127
Robert Phillips5edf5102020-08-10 16:30:36 -0400128 // This accessor should only ever be called by the GrOpFlushState.
129 GrSmallPathAtlasMgr* getSmallPathAtlasMgr() {
130 return fContext->onGetSmallPathAtlasMgr();
131 }
132
Robert Phillipseb54bb52021-01-08 17:20:18 -0500133 void createDDLTask(sk_sp<const SkDeferredDisplayList>,
134 sk_sp<GrRenderTargetProxy> newDest,
Robert Phillips88b29612020-11-16 15:15:08 -0500135 SkIPoint offset);
Robert Phillips62000362018-02-01 09:10:04 -0500136
Robert Phillips43e7e4f2020-05-06 13:34:45 -0400137 bool compile(const GrProgramDesc&, const GrProgramInfo&);
Robert Phillips979b2232020-02-20 10:47:29 -0500138
Robert Phillipsdbaf3172019-02-06 15:12:53 -0500139 GrContextOptions::PersistentCache* getPersistentCache() { return fContext->fPersistentCache; }
Brian Osman5e7fbfd2019-05-03 13:13:35 -0400140 GrContextOptions::ShaderErrorHandler* getShaderErrorHandler() const {
141 return fContext->fShaderErrorHandler;
142 }
Robert Phillipsdbaf3172019-02-06 15:12:53 -0500143
Brian Salomon9241a6d2019-10-03 13:26:54 -0400144 GrClientMappedBufferManager* clientMappedBufferManager() {
145 return fContext->fMappedBufferManager.get();
146 }
147
Robert Phillipscc44feb2021-07-06 12:21:37 -0400148 sk_sp<skgpu::BaseDevice> createDevice(GrColorType,
149 sk_sp<GrSurfaceProxy>,
150 sk_sp<SkColorSpace>,
151 GrSurfaceOrigin,
152 const SkSurfaceProps&,
153 skgpu::BaseDevice::InitContents);
154 sk_sp<skgpu::BaseDevice> createDevice(SkBudgeted,
155 const SkImageInfo&,
156 SkBackingFit,
157 int sampleCount,
158 GrMipmapped,
159 GrProtected,
160 GrSurfaceOrigin,
161 const SkSurfaceProps&,
162 skgpu::BaseDevice::InitContents);
Robert Phillips516405c2021-06-04 16:37:30 -0400163
Robert Phillipsdbaf3172019-02-06 15:12:53 -0500164#if GR_TEST_UTILS
Robert Phillips0c4b7b12018-03-06 08:20:37 -0500165 /** Reset GPU stats */
Robert Phillips273f1072020-05-05 13:03:07 -0400166 void resetGpuStats() const;
Robert Phillips0c4b7b12018-03-06 08:20:37 -0500167
168 /** Prints cache stats to the string if GR_CACHE_STATS == 1. */
169 void dumpCacheStats(SkString*) const;
170 void dumpCacheStatsKeyValuePairs(SkTArray<SkString>* keys, SkTArray<double>* values) const;
171 void printCacheStats() const;
172
173 /** Prints GPU stats to the string if GR_GPU_STATS == 1. */
174 void dumpGpuStats(SkString*) const;
175 void dumpGpuStatsKeyValuePairs(SkTArray<SkString>* keys, SkTArray<double>* values) const;
176 void printGpuStats() const;
177
Robert Phillips273f1072020-05-05 13:03:07 -0400178 /** These are only active if GR_GPU_STATS == 1. */
179 void resetContextStats() const;
180 void dumpContextStats(SkString*) const;
181 void dumpContextStatsKeyValuePairs(SkTArray<SkString>* keys, SkTArray<double>* values) const;
182 void printContextStats() const;
183
Robert Phillips0c4b7b12018-03-06 08:20:37 -0500184 /** Get pointer to atlas texture for given mask format. Note that this wraps an
185 actively mutating texture in an SkImage. This could yield unexpected results
186 if it gets cached or used more generally. */
Robert Phillipsdbaf3172019-02-06 15:12:53 -0500187 sk_sp<SkImage> testingOnly_getFontAtlasImage(GrMaskFormat format, unsigned int index = 0);
Robert Phillips0c4b7b12018-03-06 08:20:37 -0500188
Robert Phillipsdbaf3172019-02-06 15:12:53 -0500189 void testingOnly_flushAndRemoveOnFlushCallbackObject(GrOnFlushCallbackObject*);
190#endif
Robert Phillips0c4b7b12018-03-06 08:20:37 -0500191
robertphillips4fd74ae2016-08-03 14:26:53 -0700192private:
Adlai Hollera0693042020-10-14 11:23:11 -0400193 explicit GrDirectContextPriv(GrDirectContext* context) : fContext(context) {}
194 GrDirectContextPriv(const GrDirectContextPriv&) = delete;
195 GrDirectContextPriv& operator=(const GrDirectContextPriv&) = delete;
robertphillips4fd74ae2016-08-03 14:26:53 -0700196
197 // No taking addresses of this type.
Adlai Hollera0693042020-10-14 11:23:11 -0400198 const GrDirectContextPriv* operator&() const;
199 GrDirectContextPriv* operator&();
robertphillips4fd74ae2016-08-03 14:26:53 -0700200
Adlai Holler53cf44c2020-10-13 17:40:21 -0400201 GrDirectContext* fContext;
robertphillips4fd74ae2016-08-03 14:26:53 -0700202
Adlai Holler53cf44c2020-10-13 17:40:21 -0400203 friend class GrDirectContext; // to construct/copy this type.
robertphillips4fd74ae2016-08-03 14:26:53 -0700204};
205
Adlai Hollera0693042020-10-14 11:23:11 -0400206inline GrDirectContextPriv GrDirectContext::priv() { return GrDirectContextPriv(this); }
robertphillips4fd74ae2016-08-03 14:26:53 -0700207
Adlai Hollera0693042020-10-14 11:23:11 -0400208// NOLINTNEXTLINE(readability-const-return-type)
209inline const GrDirectContextPriv GrDirectContext::priv() const {
210 return GrDirectContextPriv(const_cast<GrDirectContext*>(this));
robertphillips4fd74ae2016-08-03 14:26:53 -0700211}

#endif