/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrSurfaceProxy_DEFINED
#define GrSurfaceProxy_DEFINED

#include "GrGpuResource.h"
#include "GrSurface.h"

#include "SkRect.h"

class GrBackendTexture;
class GrCaps;
class GrOpList;
class GrProxyProvider;
class GrRenderTargetOpList;
class GrRenderTargetProxy;
class GrResourceProvider;
class GrSurfaceContext;
class GrSurfaceProxyPriv;
class GrTextureOpList;
class GrTextureProxy;

// This class replicates the functionality of GrIORef<GrSurface> but tracks the
// utilization for later resource allocation (for the deferred case) and
// forwards on the utilization in the wrapped case. (A brief usage sketch follows
// the class definition.)
class GrIORefProxy : public SkNoncopyable {
public:
    void ref() const {
        this->validate();

        ++fRefCnt;
        if (fTarget) {
            fTarget->ref();
        }
    }

    void unref() const {
        this->validate();

        if (fTarget) {
            fTarget->unref();
        }

        --fRefCnt;
        this->didRemoveRefOrPendingIO();
    }

#ifdef SK_DEBUG
    bool isUnique_debugOnly() const { // For asserts.
        SkASSERT(fRefCnt >= 0 && fPendingWrites >= 0 && fPendingReads >= 0);
        return 1 == fRefCnt + fPendingWrites + fPendingReads;
    }
#endif

    void release() {
        // The proxy itself may still have multiple refs. It can be owned by an SkImage and multiple
        // SkDeferredDisplayLists at the same time if we are using DDLs.
        SkASSERT(0 == fPendingReads);
        SkASSERT(0 == fPendingWrites);

        SkASSERT(fRefCnt == fTarget->fRefCnt);
        SkASSERT(!fTarget->internalHasPendingIO());
        // In the current hybrid world, the proxy and backing surface are ref/unreffed in
        // synchrony. In this instance we're deInstantiating the proxy so, regardless of the
        // number of refs on the backing surface, we're going to remove it. If/when the proxy
        // is re-instantiated all the refs on the proxy (presumably due to multiple uses in ops)
        // will be transferred to the new surface.
        for (int refs = fTarget->fRefCnt; refs; --refs) {
            fTarget->unref();
        }
        fTarget = nullptr;
    }

    void validate() const {
#ifdef SK_DEBUG
        SkASSERT(fRefCnt >= 0);
        SkASSERT(fPendingReads >= 0);
        SkASSERT(fPendingWrites >= 0);
        SkASSERT(fRefCnt + fPendingReads + fPendingWrites >= 1);

        if (fTarget) {
            // The backing GrSurface can have more refs than the proxy if the proxy
            // started off wrapping an external resource (that came in with refs).
            // The GrSurface should never have fewer refs than the proxy however.
            SkASSERT(fTarget->fRefCnt >= fRefCnt);
            SkASSERT(fTarget->fPendingReads >= fPendingReads);
            SkASSERT(fTarget->fPendingWrites >= fPendingWrites);
        }
#endif
    }

    int32_t getProxyRefCnt_TestOnly() const;
    int32_t getBackingRefCnt_TestOnly() const;
    int32_t getPendingReadCnt_TestOnly() const;
    int32_t getPendingWriteCnt_TestOnly() const;

    void addPendingRead() const {
        this->validate();

        ++fPendingReads;
        if (fTarget) {
            fTarget->addPendingRead();
        }
    }

    void completedRead() const {
        this->validate();

        if (fTarget) {
            fTarget->completedRead();
        }

        --fPendingReads;
        this->didRemoveRefOrPendingIO();
    }

    void addPendingWrite() const {
        this->validate();

        ++fPendingWrites;
        if (fTarget) {
            fTarget->addPendingWrite();
        }
    }

    void completedWrite() const {
        this->validate();

        if (fTarget) {
            fTarget->completedWrite();
        }

        --fPendingWrites;
        this->didRemoveRefOrPendingIO();
    }

protected:
    GrIORefProxy() : fTarget(nullptr), fRefCnt(1), fPendingReads(0), fPendingWrites(0) {}
    GrIORefProxy(sk_sp<GrSurface> surface) : fRefCnt(1), fPendingReads(0), fPendingWrites(0) {
        // Since we're manually forwarding on refs & unrefs we don't want sk_sp doing
        // anything extra.
        fTarget = surface.release();
    }
    virtual ~GrIORefProxy() {
        // We don't unref 'fTarget' here since the 'unref' method will already
        // have forwarded on the unref call that got us here.
    }

    // This GrIORefProxy was deferred before but has just been instantiated. To
    // make all the reffing & unreffing work out we now need to transfer any deferred
    // refs & unrefs to the new GrSurface
    void transferRefs() {
        SkASSERT(fTarget);

        SkASSERT(fTarget->fRefCnt > 0);
        fTarget->fRefCnt += (fRefCnt-1); // don't xfer the proxy's creation ref
        fTarget->fPendingReads += fPendingReads;
        fTarget->fPendingWrites += fPendingWrites;
    }

    bool internalHasPendingIO() const {
        if (fTarget) {
            return fTarget->internalHasPendingIO();
        }

        return SkToBool(fPendingWrites | fPendingReads);
    }

    bool internalHasPendingWrite() const {
        if (fTarget) {
            return fTarget->internalHasPendingWrite();
        }

        return SkToBool(fPendingWrites);
    }

    // For deferred proxies this will be null. For wrapped proxies it will point to the
    // wrapped resource.
    GrSurface* fTarget;

private:
    // This class is used to manage conversion of refs to pending reads/writes.
    friend class GrSurfaceProxyRef;
    template <typename, GrIOType> friend class GrPendingIOResource;

    void didRemoveRefOrPendingIO() const {
        if (0 == fPendingReads && 0 == fPendingWrites && 0 == fRefCnt) {
            delete this;
        }
    }

    mutable int32_t fRefCnt;
    mutable int32_t fPendingReads;
    mutable int32_t fPendingWrites;
};
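
// A conceptual sketch of the forwarding described above. It is hedged: GrIORefProxy's
// constructors are protected, so in practice 'proxy' would be a GrSurfaceProxy subclass
// (e.g. a texture or render-target proxy) rather than a bare GrIORefProxy.
//
//     // Wrapped case: fTarget is non-null, so every count is mirrored on the GrSurface.
//     proxy->ref();      // ++fRefCnt on the proxy and fTarget->ref()
//     proxy->unref();    // fTarget->unref(), then --fRefCnt (may delete the proxy)
//
//     // Deferred case: fTarget is null, so refs and pending IO accumulate on the proxy
//     // alone and are pushed onto the new GrSurface by transferRefs() once the proxy
//     // is instantiated.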

class GrSurfaceProxy : public GrIORefProxy {
public:
    enum class LazyInstantiationType {
        kSingleUse,     // Instantiation callback is allowed to be called only once
        kMultipleUse,   // Instantiation callback can be called multiple times.
        kUninstantiate, // Instantiation callback can be called multiple times,
                        // but we will uninstantiate the proxy after every flush
    };

    enum class LazyState {
        kNot,       // The proxy is instantiated or does not have a lazy callback
        kPartially, // The proxy has a lazy callback but knows basic information about itself.
        kFully,     // The proxy has a lazy callback and also doesn't know its width, height, etc.
    };

    LazyState lazyInstantiationState() const {
        if (fTarget || !SkToBool(fLazyInstantiateCallback)) {
            return LazyState::kNot;
        } else {
            if (fWidth <= 0) {
                SkASSERT(fHeight <= 0);
                return LazyState::kFully;
            } else {
                SkASSERT(fHeight > 0);
                return LazyState::kPartially;
            }
        }
    }
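
    // A hedged sketch of how the states above line up with a proxy's fields; the 256x256
    // dimensions are illustrative only:
    //
    //     fTarget set, or no lazy callback        -> LazyState::kNot
    //     callback set, fWidth/fHeight = 256      -> LazyState::kPartially (size known)
    //     callback set, fWidth/fHeight <= 0       -> LazyState::kFully     (size also lazy)
    //
    //     if (GrSurfaceProxy::LazyState::kFully == proxy->lazyInstantiationState()) {
    //         // width()/height() would assert here; the lazy callback must run first.
    //     }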

    GrPixelConfig config() const { return fConfig; }
    int width() const {
        SkASSERT(LazyState::kFully != this->lazyInstantiationState());
        return fWidth;
    }
    int height() const {
        SkASSERT(LazyState::kFully != this->lazyInstantiationState());
        return fHeight;
    }
    int worstCaseWidth() const;
    int worstCaseHeight() const;
    GrSurfaceOrigin origin() const {
        SkASSERT(kTopLeft_GrSurfaceOrigin == fOrigin || kBottomLeft_GrSurfaceOrigin == fOrigin);
        return fOrigin;
    }

    class UniqueID {
    public:
        static UniqueID InvalidID() {
            return UniqueID(uint32_t(SK_InvalidUniqueID));
        }

        // wrapped
        explicit UniqueID(const GrGpuResource::UniqueID& id) : fID(id.asUInt()) { }
        // deferred and lazy-callback
        UniqueID() : fID(GrGpuResource::CreateUniqueID()) { }

        uint32_t asUInt() const { return fID; }

        bool operator==(const UniqueID& other) const {
            return fID == other.fID;
        }
        bool operator!=(const UniqueID& other) const {
            return !(*this == other);
        }

        void makeInvalid() { fID = SK_InvalidUniqueID; }
        bool isInvalid() const { return SK_InvalidUniqueID == fID; }

    private:
        explicit UniqueID(uint32_t id) : fID(id) {}

        uint32_t fID;
    };

    /*
     * The contract for the uniqueID is:
     *   for wrapped resources:
     *      the uniqueID will match that of the wrapped resource
     *
     *   for deferred resources:
     *      the uniqueID will be different from that of the real resource, once it is allocated
     *      the proxy's uniqueID will not change across the instantiate call
     *
     *   the uniqueIDs of the proxies and the resources draw from the same pool
     *
     * What this boils down to is that the uniqueID of a proxy can be used to consistently
     * track/identify a proxy but should never be used to distinguish between
     * resources and proxies - beware!
     */
    UniqueID uniqueID() const { return fUniqueID; }

    UniqueID underlyingUniqueID() const {
        if (fTarget) {
            return UniqueID(fTarget->uniqueID());
        }

        return fUniqueID;
    }
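
    // A hedged sketch of the contract above; 'proxy' stands in for a deferred proxy and
    // 'resourceProvider' for a valid GrResourceProvider:
    //
    //     GrSurfaceProxy::UniqueID before = proxy->uniqueID();
    //     proxy->instantiate(resourceProvider);
    //     SkASSERT(before == proxy->uniqueID());  // the proxy's ID is stable across instantiate
    //     // The backing resource draws its own ID from the shared pool, so after instantiation
    //     // underlyingUniqueID() may differ from uniqueID(); for a wrapped proxy they match.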

    virtual bool instantiate(GrResourceProvider* resourceProvider) = 0;

    void deInstantiate();

    /**
     * Helper that gets the width and height of the surface as a bounding rectangle.
     */
    SkRect getBoundsRect() const {
        SkASSERT(LazyState::kFully != this->lazyInstantiationState());
        return SkRect::MakeIWH(this->width(), this->height());
    }

    /**
     * @return the texture proxy associated with the surface proxy, may be NULL.
     */
    virtual GrTextureProxy* asTextureProxy() { return nullptr; }
    virtual const GrTextureProxy* asTextureProxy() const { return nullptr; }

    /**
     * @return the render target proxy associated with the surface proxy, may be NULL.
     */
    virtual GrRenderTargetProxy* asRenderTargetProxy() { return nullptr; }
    virtual const GrRenderTargetProxy* asRenderTargetProxy() const { return nullptr; }

    /**
     * Does the resource count against the resource budget?
     */
    SkBudgeted isBudgeted() const { return fBudgeted; }

    void setLastOpList(GrOpList* opList);
    GrOpList* getLastOpList() { return fLastOpList; }

    GrRenderTargetOpList* getLastRenderTargetOpList();
    GrTextureOpList* getLastTextureOpList();

    /**
     * Retrieves the amount of GPU memory that will be or currently is used by this resource
     * in bytes. It is approximate since we aren't aware of additional padding or copies made
     * by the driver.
     *
     * @return the amount of GPU memory used in bytes
     */
    size_t gpuMemorySize() const {
        SkASSERT(LazyState::kFully != this->lazyInstantiationState());
        if (fTarget) {
            return fTarget->gpuMemorySize();
        }
        if (kInvalidGpuMemorySize == fGpuMemorySize) {
            fGpuMemorySize = this->onUninstantiatedGpuMemorySize();
            SkASSERT(kInvalidGpuMemorySize != fGpuMemorySize);
        }
        return fGpuMemorySize;
    }

    // Helper function that creates a temporary SurfaceContext to perform the copy.
    // It always returns a kExact-backed proxy because it is used when converting an SkSpecialImage
    // to an SkImage. The copy is not a render target and not multisampled.
    static sk_sp<GrTextureProxy> Copy(GrContext*, GrSurfaceProxy* src, GrMipMapped,
                                      SkIRect srcRect, SkBudgeted);

    // Copy the entire 'src'.
    // It always returns a kExact-backed proxy because it is used in SkGpuDevice::snapSpecial.
    static sk_sp<GrTextureProxy> Copy(GrContext* context, GrSurfaceProxy* src, GrMipMapped,
                                      SkBudgeted budgeted);
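
    // A hedged usage sketch for the two copy helpers above; 'context', 'srcProxy', and
    // 'srcRect' are assumed to already exist:
    //
    //     sk_sp<GrTextureProxy> part  = GrSurfaceProxy::Copy(context, srcProxy, GrMipMapped::kNo,
    //                                                        srcRect, SkBudgeted::kYes);
    //     sk_sp<GrTextureProxy> whole = GrSurfaceProxy::Copy(context, srcProxy, GrMipMapped::kNo,
    //                                                        SkBudgeted::kYes);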

    // Test-only entry point - should decrease in use as proxies propagate
    static sk_sp<GrSurfaceContext> TestCopy(GrContext* context, const GrSurfaceDesc& dstDesc,
                                            GrSurfaceOrigin, GrSurfaceProxy* srcProxy);

    bool isWrapped_ForTesting() const;

    SkDEBUGCODE(void validate(GrContext*) const;)

    // Provides access to functions that aren't part of the public API.
    inline GrSurfaceProxyPriv priv();
    inline const GrSurfaceProxyPriv priv() const;

protected:
    // Deferred version
    GrSurfaceProxy(const GrSurfaceDesc& desc, GrSurfaceOrigin origin, SkBackingFit fit,
                   SkBudgeted budgeted, uint32_t flags)
            : GrSurfaceProxy(nullptr, LazyInstantiationType::kSingleUse, desc, origin, fit,
                             budgeted, flags) {
        // Note: this ctor pulls a new uniqueID from the same pool as the GrGpuResources
    }

    using LazyInstantiateCallback = std::function<sk_sp<GrSurface>(GrResourceProvider*)>;
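
    // A hedged sketch of what a lazy-instantiation callback might look like. The captured
    // 'desc' and the GrResourceProvider::createTexture() call are illustrative assumptions,
    // not a prescribed recipe:
    //
    //     LazyInstantiateCallback cb = [desc](GrResourceProvider* provider) -> sk_sp<GrSurface> {
    //         if (!provider) {
    //             return nullptr;  // assumed guard; nothing to create without a provider
    //         }
    //         return provider->createTexture(desc, SkBudgeted::kYes);
    //     };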

    // Lazy-callback version
    GrSurfaceProxy(LazyInstantiateCallback&& callback, LazyInstantiationType lazyType,
                   const GrSurfaceDesc& desc, GrSurfaceOrigin origin, SkBackingFit fit,
                   SkBudgeted budgeted, uint32_t flags);

    // Wrapped version
    GrSurfaceProxy(sk_sp<GrSurface> surface, GrSurfaceOrigin origin, SkBackingFit fit);

    virtual ~GrSurfaceProxy();

    friend class GrSurfaceProxyPriv;

    // Methods made available via GrSurfaceProxyPriv
    bool hasPendingIO() const {
        return this->internalHasPendingIO();
    }

    bool hasPendingWrite() const {
        return this->internalHasPendingWrite();
    }

    void computeScratchKey(GrScratchKey*) const;

    virtual sk_sp<GrSurface> createSurface(GrResourceProvider*) const = 0;
    void assign(sk_sp<GrSurface> surface);

    sk_sp<GrSurface> createSurfaceImpl(GrResourceProvider*, int sampleCnt, bool needsStencil,
                                       GrSurfaceFlags flags, GrMipMapped mipMapped) const;

    bool instantiateImpl(GrResourceProvider* resourceProvider, int sampleCnt, bool needsStencil,
                         GrSurfaceFlags flags, GrMipMapped mipMapped, const GrUniqueKey*);

private:
    // For wrapped resources, 'fConfig', 'fWidth', 'fHeight', and 'fOrigin' will always be filled in
    // from the wrapped resource.
    GrPixelConfig        fConfig;
    int                  fWidth;
    int                  fHeight;
    GrSurfaceOrigin      fOrigin;
    SkBackingFit         fFit;      // always kApprox for lazy-callback resources
                                    // always kExact for wrapped resources
    mutable SkBudgeted   fBudgeted; // always kYes for lazy-callback resources
                                    // set from the backing resource for wrapped resources
                                    // mutable bc of SkSurface/SkImage wishy-washiness
    const uint32_t       fFlags;

    const UniqueID       fUniqueID; // set from the backing resource for wrapped resources

    LazyInstantiateCallback fLazyInstantiateCallback;
    // If this is set to kSingleUse, then after one call to fLazyInstantiateCallback we will clean
    // up the lazy callback and then delete it. This will allow for any refs and resources being
    // held by the standard function to be released. This is specifically useful in non-DDL cases
    // where we make lazy proxies and instantiate them immediately.
    // Note: This is ignored if fLazyInstantiateCallback is null.
    LazyInstantiationType fLazyInstantiationType;
    SkDEBUGCODE(virtual void validateLazySurface(const GrSurface*) = 0;)

    static const size_t kInvalidGpuMemorySize = ~static_cast<size_t>(0);
    SkDEBUGCODE(size_t getRawGpuMemorySize_debugOnly() const { return fGpuMemorySize; })

    virtual size_t onUninstantiatedGpuMemorySize() const = 0;

    bool                 fNeedsClear;

    // This entry is lazily evaluated so, when the proxy wraps a resource, the resource's
    // size will be queried but, when the proxy is deferred, it will compute the answer itself.
    // If the proxy computes its own answer that answer is checked (in debug mode) in
    // the instantiation method.
    mutable size_t       fGpuMemorySize;

    // The last opList that wrote to or is currently going to write to this surface.
    // The opList can be closed (e.g., no surface context is currently bound
    // to this proxy).
    // This back-pointer is required so that we can add a dependency between
    // the opList used to create the current contents of this surface
    // and the opList of a destination surface to which this one is being drawn or copied.
    // This pointer is unreffed. OpLists own a ref on their surface proxies.
    GrOpList* fLastOpList;

    typedef GrIORefProxy INHERITED;
};

#endif