/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
Adlai Hollera0693042020-10-14 11:23:11 -04008#ifndef GrDirectContextPriv_DEFINED
9#define GrDirectContextPriv_DEFINED
robertphillips4fd74ae2016-08-03 14:26:53 -070010
Brian Osmana5842bc2021-05-11 13:41:46 -040011#include "include/core/SkSpan.h"
Robert Phillips80bfda82020-11-12 09:23:36 -050012#include "include/core/SkSurface.h"
Robert Phillips4e105e22020-07-16 09:18:50 -040013#include "include/gpu/GrDirectContext.h"
Robert Phillips516405c2021-06-04 16:37:30 -040014#include "src/gpu/SkBaseGpuDevice.h"
robertphillips4fd74ae2016-08-03 14:26:53 -070015
Robert Phillipse19babf2020-04-06 13:57:30 -040016class GrAtlasManager;
Greg Daniel4065d452018-11-16 15:43:41 -050017class GrBackendFormat;
Greg Danielbcf612b2017-05-01 13:50:58 +000018class GrBackendRenderTarget;
Herb Derbye32e1ab2020-10-27 10:29:46 -040019class GrMemoryPool;
Brian Salomond17f6582017-07-19 18:28:58 -040020class GrOnFlushCallbackObject;
Robert Phillipse19babf2020-04-06 13:57:30 -040021class GrRenderTargetProxy;
Greg Danield85f97d2017-03-07 13:37:21 -050022class GrSemaphore;
Robert Phillipse2f7d182016-12-15 09:23:05 -050023class GrSurfaceProxy;
24
Robert Phillips62000362018-02-01 09:10:04 -050025class SkDeferredDisplayList;
Robert Phillipsbc429442019-02-20 08:26:03 -050026class SkTaskGroup;
Robert Phillips62000362018-02-01 09:10:04 -050027
Adlai Hollera0693042020-10-14 11:23:11 -040028/** Class that adds methods to GrDirectContext that are only intended for use internal to Skia.
29 This class is purely a privileged window into GrDirectContext. It should never have additional
robertphillips4fd74ae2016-08-03 14:26:53 -070030 data members or virtual methods. */
Adlai Hollera0693042020-10-14 11:23:11 -040031class GrDirectContextPriv {
robertphillips4fd74ae2016-08-03 14:26:53 -070032public:
Robert Phillips4217ea72019-01-30 13:08:28 -050033
34 // from GrContext_Base
Robert Phillipsfd0d9702019-02-01 10:19:42 -050035 uint32_t contextID() const { return fContext->contextID(); }
Robert Phillips4217ea72019-01-30 13:08:28 -050036
Robert Phillipsfe0963c2019-02-07 13:25:07 -050037 bool matches(GrContext_Base* candidate) const { return fContext->matches(candidate); }
38
Robert Phillipsc1541ae2019-02-04 12:05:37 -050039 const GrContextOptions& options() const { return fContext->options(); }
40
Robert Phillipsbb606772019-02-04 17:50:57 -050041 const GrCaps* caps() const { return fContext->caps(); }
Robert Phillipsa41c6852019-02-07 10:44:10 -050042 sk_sp<const GrCaps> refCaps() const;
Robert Phillipsbb606772019-02-04 17:50:57 -050043
Robert Phillipsa41c6852019-02-07 10:44:10 -050044 GrImageContext* asImageContext() { return fContext->asImageContext(); }
45 GrRecordingContext* asRecordingContext() { return fContext->asRecordingContext(); }
Robert Phillipsa41c6852019-02-07 10:44:10 -050046
Adlai Holler17776e32021-02-18 13:09:36 -050047 // from GrRecordingContext
Robert Phillipsa41c6852019-02-07 10:44:10 -050048 GrProxyProvider* proxyProvider() { return fContext->proxyProvider(); }
49 const GrProxyProvider* proxyProvider() const { return fContext->proxyProvider(); }
50
51 /** This is only useful for debug purposes */
52 SkDEBUGCODE(GrSingleOwner* singleOwner() const { return fContext->singleOwner(); } )
Robert Phillips4217ea72019-01-30 13:08:28 -050053
54 // from GrRecordingContext
Robert Phillips6f0e02f2019-02-13 11:02:28 -050055 GrDrawingManager* drawingManager() { return fContext->drawingManager(); }
Michael Ludwig28b0c5d2019-12-19 14:51:00 -050056
Michael Ludwig2c316bd2019-12-19 14:50:44 -050057 SkArenaAlloc* recordTimeAllocator() { return fContext->arenas().recordTimeAllocator(); }
Michael Ludwig28b0c5d2019-12-19 14:51:00 -050058 GrRecordingContext::Arenas arenas() { return fContext->arenas(); }
Robert Phillipsd6841482019-02-08 10:29:20 -050059
Robert Phillips4d932d12020-04-09 08:58:52 -040060 GrStrikeCache* getGrStrikeCache() { return fContext->fStrikeCache.get(); }
Robert Phillips2184fb72019-02-21 16:11:41 -050061 GrTextBlobCache* getTextBlobCache() { return fContext->getTextBlobCache(); }
62
Robert Phillipsd464feb2020-10-08 11:00:02 -040063 GrThreadSafeCache* threadSafeCache() { return fContext->threadSafeCache(); }
Robert Phillips12d06a32020-09-16 12:31:34 -040064
Robert Phillipsc5058a62019-02-15 12:52:59 -050065 /**
66 * Registers an object for flush-related callbacks. (See GrOnFlushCallbackObject.)
67 *
68 * NOTE: the drawing manager tracks this object as a raw pointer; it is up to the caller to
69 * ensure its lifetime is tied to that of the context.
70 */
71 void addOnFlushCallbackObject(GrOnFlushCallbackObject*);
72
Robert Phillipsd6841482019-02-08 10:29:20 -050073 GrAuditTrail* auditTrail() { return fContext->auditTrail(); }
Robert Phillips4217ea72019-01-30 13:08:28 -050074
Robert Phillipse42edcc2017-12-13 11:50:22 -050075 /**
Brian Salomonf9a1fdf2019-05-09 10:30:12 -040076 * Finalizes all pending reads and writes to the surfaces and also performs an MSAA resolves
77 * if necessary. The GrSurfaceProxy array is treated as a hint. If it is supplied the context
78 * will guarantee that the draws required for those proxies are flushed but it could do more.
79 * If no array is provided then all current work will be flushed.
Robert Phillips7ee385e2017-03-30 08:02:11 -040080 *
81 * It is not necessary to call this before reading the render target via Skia/GrContext.
82 * GrContext will detect when it must perform a resolve before reading pixels back from the
83 * surface or using it as a texture.
84 */
Robert Phillips80bfda82020-11-12 09:23:36 -050085 GrSemaphoresSubmitted flushSurfaces(
86 SkSpan<GrSurfaceProxy*>,
87 SkSurface::BackendSurfaceAccess = SkSurface::BackendSurfaceAccess::kNoAccess,
88 const GrFlushInfo& = {},
89 const GrBackendSurfaceMutableState* newState = nullptr);
Brian Salomonf9a1fdf2019-05-09 10:30:12 -040090
Robert Phillips80bfda82020-11-12 09:23:36 -050091 /** Version of above that flushes for a single proxy. Null is allowed. */
92 GrSemaphoresSubmitted flushSurface(
93 GrSurfaceProxy* proxy,
94 SkSurface::BackendSurfaceAccess access = SkSurface::BackendSurfaceAccess::kNoAccess,
95 const GrFlushInfo& info = {},
96 const GrBackendSurfaceMutableState* newState = nullptr) {
97 size_t size = proxy ? 1 : 0;
98 return this->flushSurfaces({&proxy, size}, access, info, newState);
99 }
Robert Phillips7ee385e2017-03-30 08:02:11 -0400100
Greg Daniel6eb8c242019-06-05 10:22:24 -0400101 /**
102 * Returns true if createPMToUPMEffect and createUPMToPMEffect will succeed. In other words,
103 * did we find a pair of round-trip preserving conversion effects?
104 */
105 bool validPMUPMConversionExists();
Robert Phillipse78b7252017-04-06 07:59:41 -0400106
107 /**
Greg Daniel6eb8c242019-06-05 10:22:24 -0400108 * These functions create premul <-> unpremul effects, using the specialized round-trip effects
109 * from GrConfigConversionEffect.
Robert Phillipse78b7252017-04-06 07:59:41 -0400110 */
Greg Daniel6eb8c242019-06-05 10:22:24 -0400111 std::unique_ptr<GrFragmentProcessor> createPMToUPMEffect(std::unique_ptr<GrFragmentProcessor>);
112 std::unique_ptr<GrFragmentProcessor> createUPMToPMEffect(std::unique_ptr<GrFragmentProcessor>);
Robert Phillipse78b7252017-04-06 07:59:41 -0400113
Brian Osman51279982017-08-23 10:12:00 -0400114 SkTaskGroup* getTaskGroup() { return fContext->fTaskGroup.get(); }
115
Adlai Holler9555f292020-10-09 09:41:14 -0400116 GrResourceProvider* resourceProvider() { return fContext->fResourceProvider.get(); }
117 const GrResourceProvider* resourceProvider() const { return fContext->fResourceProvider.get(); }
Robert Phillips6be756b2018-01-16 15:07:54 -0500118
Adlai Holler9555f292020-10-09 09:41:14 -0400119 GrResourceCache* getResourceCache() { return fContext->fResourceCache.get(); }
Robert Phillips6be756b2018-01-16 15:07:54 -0500120
Robert Phillipsf35fd8d2018-01-22 10:48:15 -0500121 GrGpu* getGpu() { return fContext->fGpu.get(); }
122 const GrGpu* getGpu() const { return fContext->fGpu.get(); }
123
Robert Phillipsc4039ea2018-03-01 11:36:45 -0500124 // This accessor should only ever be called by the GrOpFlushState.
Robert Phillips5a66efb2018-03-07 15:13:18 -0500125 GrAtlasManager* getAtlasManager() {
126 return fContext->onGetAtlasManager();
Robert Phillipsc4039ea2018-03-01 11:36:45 -0500127 }
Robert Phillipsf35fd8d2018-01-22 10:48:15 -0500128
Robert Phillips5edf5102020-08-10 16:30:36 -0400129 // This accessor should only ever be called by the GrOpFlushState.
130 GrSmallPathAtlasMgr* getSmallPathAtlasMgr() {
131 return fContext->onGetSmallPathAtlasMgr();
132 }
133
Robert Phillipseb54bb52021-01-08 17:20:18 -0500134 void createDDLTask(sk_sp<const SkDeferredDisplayList>,
135 sk_sp<GrRenderTargetProxy> newDest,
Robert Phillips88b29612020-11-16 15:15:08 -0500136 SkIPoint offset);
Robert Phillips62000362018-02-01 09:10:04 -0500137
Robert Phillips43e7e4f2020-05-06 13:34:45 -0400138 bool compile(const GrProgramDesc&, const GrProgramInfo&);
Robert Phillips979b2232020-02-20 10:47:29 -0500139
Robert Phillipsdbaf3172019-02-06 15:12:53 -0500140 GrContextOptions::PersistentCache* getPersistentCache() { return fContext->fPersistentCache; }
Brian Osman5e7fbfd2019-05-03 13:13:35 -0400141 GrContextOptions::ShaderErrorHandler* getShaderErrorHandler() const {
142 return fContext->fShaderErrorHandler;
143 }
Robert Phillipsdbaf3172019-02-06 15:12:53 -0500144
Brian Salomon9241a6d2019-10-03 13:26:54 -0400145 GrClientMappedBufferManager* clientMappedBufferManager() {
146 return fContext->fMappedBufferManager.get();
147 }
148
Robert Phillips516405c2021-06-04 16:37:30 -0400149 sk_sp<SkBaseGpuDevice> createDevice(GrColorType,
150 sk_sp<GrSurfaceProxy>,
151 sk_sp<SkColorSpace>,
152 GrSurfaceOrigin,
153 const SkSurfaceProps&,
154 SkBaseGpuDevice::InitContents);
155 sk_sp<SkBaseGpuDevice> createDevice(SkBudgeted,
156 const SkImageInfo&,
157 SkBackingFit,
158 int sampleCount,
159 GrMipmapped,
160 GrProtected,
161 GrSurfaceOrigin,
162 const SkSurfaceProps&,
163 SkBaseGpuDevice::InitContents);
164
Robert Phillipsdbaf3172019-02-06 15:12:53 -0500165#if GR_TEST_UTILS
Robert Phillips0c4b7b12018-03-06 08:20:37 -0500166 /** Reset GPU stats */
Robert Phillips273f1072020-05-05 13:03:07 -0400167 void resetGpuStats() const;
Robert Phillips0c4b7b12018-03-06 08:20:37 -0500168
169 /** Prints cache stats to the string if GR_CACHE_STATS == 1. */
170 void dumpCacheStats(SkString*) const;
171 void dumpCacheStatsKeyValuePairs(SkTArray<SkString>* keys, SkTArray<double>* values) const;
172 void printCacheStats() const;
173
174 /** Prints GPU stats to the string if GR_GPU_STATS == 1. */
175 void dumpGpuStats(SkString*) const;
176 void dumpGpuStatsKeyValuePairs(SkTArray<SkString>* keys, SkTArray<double>* values) const;
177 void printGpuStats() const;
178
Robert Phillips273f1072020-05-05 13:03:07 -0400179 /** These are only active if GR_GPU_STATS == 1. */
180 void resetContextStats() const;
181 void dumpContextStats(SkString*) const;
182 void dumpContextStatsKeyValuePairs(SkTArray<SkString>* keys, SkTArray<double>* values) const;
183 void printContextStats() const;
184
Robert Phillips0c4b7b12018-03-06 08:20:37 -0500185 /** Get pointer to atlas texture for given mask format. Note that this wraps an
186 actively mutating texture in an SkImage. This could yield unexpected results
187 if it gets cached or used more generally. */
Robert Phillipsdbaf3172019-02-06 15:12:53 -0500188 sk_sp<SkImage> testingOnly_getFontAtlasImage(GrMaskFormat format, unsigned int index = 0);
Robert Phillips0c4b7b12018-03-06 08:20:37 -0500189
Robert Phillipsdbaf3172019-02-06 15:12:53 -0500190 /**
191 * Purge all the unlocked resources from the cache.
192 * This entry point is mainly meant for timing texture uploads
193 * and is not defined in normal builds of Skia.
194 */
195 void testingOnly_purgeAllUnlockedResources();
Robert Phillips0c4b7b12018-03-06 08:20:37 -0500196
Robert Phillipsdbaf3172019-02-06 15:12:53 -0500197 void testingOnly_flushAndRemoveOnFlushCallbackObject(GrOnFlushCallbackObject*);
198#endif
Robert Phillips0c4b7b12018-03-06 08:20:37 -0500199
robertphillips4fd74ae2016-08-03 14:26:53 -0700200private:
Adlai Hollera0693042020-10-14 11:23:11 -0400201 explicit GrDirectContextPriv(GrDirectContext* context) : fContext(context) {}
202 GrDirectContextPriv(const GrDirectContextPriv&) = delete;
203 GrDirectContextPriv& operator=(const GrDirectContextPriv&) = delete;
robertphillips4fd74ae2016-08-03 14:26:53 -0700204
205 // No taking addresses of this type.
Adlai Hollera0693042020-10-14 11:23:11 -0400206 const GrDirectContextPriv* operator&() const;
207 GrDirectContextPriv* operator&();
robertphillips4fd74ae2016-08-03 14:26:53 -0700208
Adlai Holler53cf44c2020-10-13 17:40:21 -0400209 GrDirectContext* fContext;
robertphillips4fd74ae2016-08-03 14:26:53 -0700210
Adlai Holler53cf44c2020-10-13 17:40:21 -0400211 friend class GrDirectContext; // to construct/copy this type.
robertphillips4fd74ae2016-08-03 14:26:53 -0700212};
213
Adlai Hollera0693042020-10-14 11:23:11 -0400214inline GrDirectContextPriv GrDirectContext::priv() { return GrDirectContextPriv(this); }
robertphillips4fd74ae2016-08-03 14:26:53 -0700215
Adlai Hollera0693042020-10-14 11:23:11 -0400216// NOLINTNEXTLINE(readability-const-return-type)
217inline const GrDirectContextPriv GrDirectContext::priv() const {
218 return GrDirectContextPriv(const_cast<GrDirectContext*>(this));
robertphillips4fd74ae2016-08-03 14:26:53 -0700219}
220
221#endif