/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrSurfaceProxy_DEFINED
#define GrSurfaceProxy_DEFINED

#include "include/core/SkRect.h"
#include "include/gpu/GrBackendSurface.h"
#include "include/gpu/GrGpuResource.h"
#include "include/gpu/GrSurface.h"
#include "include/gpu/GrTexture.h"
#include "include/private/SkNoncopyable.h"

class GrCaps;
class GrContext_Base;
class GrOpList;
class GrRecordingContext;
class GrRenderTargetOpList;
class GrRenderTargetProxy;
class GrResourceProvider;
class GrSurfaceContext;
class GrSurfaceProxyPriv;
class GrTextureOpList;
class GrTextureProxy;

// This class replicates the functionality of GrIORef<GrSurface> but tracks the
// utilization for later resource allocation (for the deferred case) and
// forwards on the utilization in the wrapped case.
class GrIORefProxy : public SkNoncopyable {
public:
    void ref() const {
        this->validate();

        ++fRefCnt;
        if (fTarget) {
            fTarget->ref();
        }
    }

    void unref() const {
        this->validate();

        if (fTarget) {
            fTarget->unref();
        }

        --fRefCnt;
        this->didRemoveRefOrPendingIO();
    }

#ifdef SK_DEBUG
    bool isUnique_debugOnly() const { // For asserts.
        SkASSERT(fRefCnt >= 0 && fPendingWrites >= 0 && fPendingReads >= 0);
        return 1 == fRefCnt + fPendingWrites + fPendingReads;
    }
#endif

    void release() {
        // The proxy itself may still have multiple refs. It can be owned by an SkImage and multiple
        // SkDeferredDisplayLists at the same time if we are using DDLs.
        SkASSERT(0 == fPendingReads);
        SkASSERT(0 == fPendingWrites);

        // In the current hybrid world, the proxy and backing surface are ref/unreffed in
        // synchrony. Each ref we've added to or removed from the proxy was mirrored to the backing
        // surface. Though, that backing surface could be owned by other proxies as well. Remove
        // a ref from the backing surface for each ref the proxy has since we are about to remove
        // our pointer to the surface. If this proxy is reinstantiated then all the proxy's refs
        // get transferred to the (possibly new) backing surface.
        for (int refs = fRefCnt; refs; --refs) {
            fTarget->unref();
        }
        fTarget = nullptr;
    }
    void validate() const {
#ifdef SK_DEBUG
        SkASSERT(fRefCnt >= 0);
        SkASSERT(fPendingReads >= 0);
        SkASSERT(fPendingWrites >= 0);
        SkASSERT(fRefCnt + fPendingReads + fPendingWrites >= 1);

        if (fTarget) {
            // The backing GrSurface can have more refs than the proxy if the proxy
            // started off wrapping an external resource (that came in with refs).
            // The GrSurface should never have fewer refs than the proxy however.
            SkASSERT(fTarget->fRefCnt >= fRefCnt);
            SkASSERT(fTarget->fPendingReads >= fPendingReads);
            SkASSERT(fTarget->fPendingWrites >= fPendingWrites);
        }
#endif
    }

    int32_t getBackingRefCnt_TestOnly() const;
    int32_t getPendingReadCnt_TestOnly() const;
    int32_t getPendingWriteCnt_TestOnly() const;

    void addPendingRead() const {
        this->validate();

        ++fPendingReads;
        if (fTarget) {
            fTarget->addPendingRead();
        }
    }

    void completedRead() const {
        this->validate();

        if (fTarget) {
            fTarget->completedRead();
        }

        --fPendingReads;
        this->didRemoveRefOrPendingIO();
    }

    void addPendingWrite() const {
        this->validate();

        ++fPendingWrites;
        if (fTarget) {
            fTarget->addPendingWrite();
        }
    }

    void completedWrite() const {
        this->validate();

        if (fTarget) {
            fTarget->completedWrite();
        }

        --fPendingWrites;
        this->didRemoveRefOrPendingIO();
    }

protected:
    GrIORefProxy() : fTarget(nullptr), fRefCnt(1), fPendingReads(0), fPendingWrites(0) {}
    GrIORefProxy(sk_sp<GrSurface> surface) : fRefCnt(1), fPendingReads(0), fPendingWrites(0) {
        // Since we're manually forwarding on refs & unrefs we don't want sk_sp doing
        // anything extra.
        fTarget = surface.release();
    }
    virtual ~GrIORefProxy() {
        // We don't unref 'fTarget' here since the 'unref' method will already
        // have forwarded on the unref call that got us here.
    }

    // Privileged method that allows going from ref count = 0 to ref count = 1.
    void addInitialRef(GrResourceCache* cache) const {
        this->validate();
        ++fRefCnt;
        if (fTarget) {
            fTarget->proxyAccess().ref(cache);
        }
    }

    // This GrIORefProxy was deferred before but has just been instantiated. To
    // make all the reffing & unreffing work out we now need to transfer any deferred
    // refs & unrefs to the new GrSurface
    void transferRefs() {
        SkASSERT(fTarget);
        // Make sure we're going to take some ownership of our target.
        SkASSERT(fRefCnt > 0 || fPendingReads > 0 || fPendingWrites > 0);

        // Transfer pending read/writes first so that if we decrement the target's ref cnt we don't
        // cause a purge of the target.
        fTarget->fPendingReads += fPendingReads;
        fTarget->fPendingWrites += fPendingWrites;
        SkASSERT(fTarget->fRefCnt > 0);
        SkASSERT(fRefCnt >= 0);
        // Don't xfer the proxy's creation ref. If we're going to subtract a ref do it via unref()
        // so that proper cache notifications occur.
        if (!fRefCnt) {
            fTarget->unref();
        } else {
            fTarget->fRefCnt += (fRefCnt - 1);
        }
    }

    int32_t internalGetProxyRefCnt() const { return fRefCnt; }
    int32_t internalGetTotalRefs() const { return fRefCnt + fPendingReads + fPendingWrites; }

    // For deferred proxies this will be null. For wrapped proxies it will point to the
    // wrapped resource.
    GrSurface* fTarget;

private:
    // This class is used to manage conversion of refs to pending reads/writes.
    template <typename> friend class GrProxyRef;

    void didRemoveRefOrPendingIO() const {
        if (0 == fPendingReads && 0 == fPendingWrites && 0 == fRefCnt) {
            delete this;
        }
    }

    mutable int32_t fRefCnt;
    mutable int32_t fPendingReads;
    mutable int32_t fPendingWrites;
};

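// A minimal usage sketch (illustrative only, not part of this header): for a wrapped proxy every
// ref()/unref() on the proxy above is forwarded to the backing GrSurface, which is why validate()
// can assert that the surface never has fewer refs than the proxy. 'makeWrappedProxy' is a
// hypothetical stand-in for whatever factory returns a proxy wrapping an existing surface.
//
//     sk_sp<GrTextureProxy> proxy = makeWrappedProxy(std::move(surface));  // hypothetical factory
//     proxy->ref();    // also adds a ref to the backing GrSurface
//     proxy->unref();  // also removes a ref from the backing GrSurface
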
class GrSurfaceProxy : public GrIORefProxy {
public:
    /**
     * Some lazy proxy callbacks want to set their own (or no key) on the GrSurfaces they return.
     * Others want the GrSurface's key to be kept in sync with the proxy's key. This enum controls
     * the key relationship between proxies and their targets.
     */
    enum class LazyInstantiationKeyMode {
        /**
         * Don't key the GrSurface with the proxy's key. The lazy instantiation callback is free to
         * return a GrSurface that already has a unique key unrelated to the proxy's key.
         */
        kUnsynced,
        /**
         * Keep the GrSurface's unique key in sync with the proxy's unique key. The GrSurface
         * returned from the lazy instantiation callback must either not have a unique key or have
         * the same unique key as the proxy. If the proxy is later assigned a key it is in turn
         * assigned to the GrSurface.
         */
        kSynced
    };

    struct LazyInstantiationResult {
        LazyInstantiationResult() = default;
        LazyInstantiationResult(const LazyInstantiationResult&) = default;
        LazyInstantiationResult(LazyInstantiationResult&& that) = default;
        LazyInstantiationResult(sk_sp<GrSurface> surf,
                                LazyInstantiationKeyMode mode = LazyInstantiationKeyMode::kSynced)
                : fSurface(std::move(surf)), fKeyMode(mode) {}
        LazyInstantiationResult(sk_sp<GrTexture> tex)
                : LazyInstantiationResult(sk_sp<GrSurface>(std::move(tex))) {}

        LazyInstantiationResult& operator=(const LazyInstantiationResult&) = default;
        LazyInstantiationResult& operator=(LazyInstantiationResult&&) = default;

        sk_sp<GrSurface> fSurface;
        LazyInstantiationKeyMode fKeyMode = LazyInstantiationKeyMode::kSynced;
    };

    using LazyInstantiateCallback = std::function<LazyInstantiationResult(GrResourceProvider*)>;

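    // A hedged sketch (comments only) of what a LazyInstantiateCallback can look like. The
    // callback runs when the proxy is finally instantiated and returns the GrSurface that will
    // back it, plus the key mode. 'createBackingTexture' is a hypothetical helper standing in
    // for however the callback actually creates or finds a texture via the GrResourceProvider.
    //
    //     GrSurfaceProxy::LazyInstantiateCallback cb =
    //             [](GrResourceProvider* provider) -> GrSurfaceProxy::LazyInstantiationResult {
    //         sk_sp<GrTexture> tex = createBackingTexture(provider);  // hypothetical helper
    //         return {std::move(tex), GrSurfaceProxy::LazyInstantiationKeyMode::kUnsynced};
    //     };
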
    enum class LazyInstantiationType {
        kSingleUse,      // Instantiation callback is allowed to be called only once.
        kMultipleUse,    // Instantiation callback can be called multiple times.
        kDeinstantiate,  // Instantiation callback can be called multiple times,
                         // but we will deinstantiate the proxy after every flush.
    };

    enum class LazyState {
        kNot,        // The proxy is instantiated or does not have a lazy callback.
        kPartially,  // The proxy has a lazy callback but knows basic information about itself.
        kFully,      // The proxy has a lazy callback and also doesn't know its width, height, etc.
    };

    LazyState lazyInstantiationState() const {
        if (fTarget || !SkToBool(fLazyInstantiateCallback)) {
            return LazyState::kNot;
        } else {
            if (fWidth <= 0) {
                SkASSERT(fHeight <= 0);
                return LazyState::kFully;
            } else {
                SkASSERT(fHeight > 0);
                return LazyState::kPartially;
            }
        }
    }

    GrPixelConfig config() const { return fConfig; }
    int width() const {
        SkASSERT(LazyState::kFully != this->lazyInstantiationState());
        return fWidth;
    }
    int height() const {
        SkASSERT(LazyState::kFully != this->lazyInstantiationState());
        return fHeight;
    }

    SkISize isize() const { return {fWidth, fHeight}; }

    int worstCaseWidth() const;
    int worstCaseHeight() const;
    /**
     * Helper that gets the width and height of the surface as a bounding rectangle.
     */
    SkRect getBoundsRect() const {
        SkASSERT(LazyState::kFully != this->lazyInstantiationState());
        return SkRect::MakeIWH(this->width(), this->height());
    }
    /**
     * Helper that gets the worst case width and height of the surface as a bounding rectangle.
     */
    SkRect getWorstCaseBoundsRect() const {
        SkASSERT(LazyState::kFully != this->lazyInstantiationState());
        return SkRect::MakeIWH(this->worstCaseWidth(), this->worstCaseHeight());
    }

    GrSurfaceOrigin origin() const {
        SkASSERT(kTopLeft_GrSurfaceOrigin == fOrigin || kBottomLeft_GrSurfaceOrigin == fOrigin);
        return fOrigin;
    }

    const GrBackendFormat& backendFormat() const { return fFormat; }

    class UniqueID {
    public:
        static UniqueID InvalidID() {
            return UniqueID(uint32_t(SK_InvalidUniqueID));
        }

        // wrapped
        explicit UniqueID(const GrGpuResource::UniqueID& id) : fID(id.asUInt()) { }
        // deferred and lazy-callback
        UniqueID() : fID(GrGpuResource::CreateUniqueID()) { }

        uint32_t asUInt() const { return fID; }

        bool operator==(const UniqueID& other) const {
            return fID == other.fID;
        }
        bool operator!=(const UniqueID& other) const {
            return !(*this == other);
        }

        void makeInvalid() { fID = SK_InvalidUniqueID; }
        bool isInvalid() const { return SK_InvalidUniqueID == fID; }

    private:
        explicit UniqueID(uint32_t id) : fID(id) {}

        uint32_t fID;
    };

    /*
     * The contract for the uniqueID is:
     *   for wrapped resources:
     *      the uniqueID will match that of the wrapped resource
     *
     *   for deferred resources:
     *      the uniqueID will be different from the real resource, when it is allocated
     *      the proxy's uniqueID will not change across the instantiate call
     *
     *      the uniqueIDs of the proxies and the resources draw from the same pool
     *
     * What this boils down to is that the uniqueID of a proxy can be used to consistently
     * track/identify a proxy but should never be used to distinguish between
     * resources and proxies - beware!
     */
    UniqueID uniqueID() const { return fUniqueID; }

    UniqueID underlyingUniqueID() const {
        if (fTarget) {
            return UniqueID(fTarget->uniqueID());
        }

        return fUniqueID;
    }
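
    // Illustrative sketch of the uniqueID() contract above (comments only): the proxy's ID is
    // stable across instantiation, while underlyingUniqueID() reports the proxy's own ID for a
    // deferred proxy and the backing resource's ID once the proxy has been instantiated.
    //
    //     GrSurfaceProxy::UniqueID before = proxy->uniqueID();
    //     proxy->instantiate(resourceProvider);
    //     SkASSERT(before == proxy->uniqueID());  // the proxy's ID did not change
    //     // proxy->underlyingUniqueID() now reports the backing GrSurface's uniqueID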

    virtual bool instantiate(GrResourceProvider*) = 0;

    void deinstantiate();

    /**
     * Proxies that are already instantiated and whose backing surface cannot be recycled to
     * instantiate other proxies do not need to be considered by GrResourceAllocator.
     */
    bool canSkipResourceAllocator() const;

    /**
     * @return the texture proxy associated with the surface proxy, may be NULL.
     */
    virtual GrTextureProxy* asTextureProxy() { return nullptr; }
    virtual const GrTextureProxy* asTextureProxy() const { return nullptr; }

    /**
     * @return the render target proxy associated with the surface proxy, may be NULL.
     */
    virtual GrRenderTargetProxy* asRenderTargetProxy() { return nullptr; }
    virtual const GrRenderTargetProxy* asRenderTargetProxy() const { return nullptr; }

    bool isInstantiated() const { return SkToBool(fTarget); }

    // If the proxy is already instantiated, return its backing GrSurface; if not, return null.
    GrSurface* peekSurface() const { return fTarget; }

    // If this is a texture proxy and the proxy is already instantiated, return its backing
    // GrTexture; if not, return null.
    GrTexture* peekTexture() const { return fTarget ? fTarget->asTexture() : nullptr; }

    // If this is a render target proxy and the proxy is already instantiated, return its backing
    // GrRenderTarget; if not, return null.
    GrRenderTarget* peekRenderTarget() const {
        return fTarget ? fTarget->asRenderTarget() : nullptr;
    }

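    // A small usage sketch (illustrative only): the peek*() accessors return null until the proxy
    // has been instantiated, so callers that need the backing object typically check first.
    //
    //     if (proxy->isInstantiated()) {
    //         GrTexture* tex = proxy->peekTexture();  // may still be null for non-texture proxies
    //     }
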
    /**
     * Does the resource count against the resource budget?
     */
    SkBudgeted isBudgeted() const { return fBudgeted; }

    /**
     * The pixel values of this proxy's surface cannot be modified (e.g. doesn't support write
     * pixels or MIP map level regen). Read-only proxies also bypass interval tracking and
     * assignment in GrResourceAllocator.
     */
    bool readOnly() const { return fSurfaceFlags & GrInternalSurfaceFlags::kReadOnly; }

    void setLastOpList(GrOpList* opList);
    GrOpList* getLastOpList() { return fLastOpList; }

    GrRenderTargetOpList* getLastRenderTargetOpList();
    GrTextureOpList* getLastTextureOpList();

    /**
     * Retrieves the amount of GPU memory that will be or currently is used by this resource
     * in bytes. It is approximate since we aren't aware of additional padding or copies made
     * by the driver.
     *
     * @return the amount of GPU memory used in bytes
     */
    size_t gpuMemorySize() const {
        SkASSERT(LazyState::kFully != this->lazyInstantiationState());
        if (fTarget) {
            return fTarget->gpuMemorySize();
        }
        if (kInvalidGpuMemorySize == fGpuMemorySize) {
            fGpuMemorySize = this->onUninstantiatedGpuMemorySize();
            SkASSERT(kInvalidGpuMemorySize != fGpuMemorySize);
        }
        return fGpuMemorySize;
    }

    enum class RectsMustMatch : bool {
        kNo = false,
        kYes = true
    };

    // Helper function that creates a temporary SurfaceContext to perform the copy.
    // The copy is not a render target and not multisampled.
    static sk_sp<GrTextureProxy> Copy(GrRecordingContext*, GrSurfaceProxy* src, GrMipMapped,
                                      SkIRect srcRect, SkBackingFit, SkBudgeted,
                                      RectsMustMatch = RectsMustMatch::kNo);

    // Copy the entire 'src'.
    static sk_sp<GrTextureProxy> Copy(GrRecordingContext*, GrSurfaceProxy* src, GrMipMapped,
                                      SkBackingFit, SkBudgeted);
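
    // A hedged example of the copy helpers above: copy 'srcRect' out of 'srcProxy' into a new
    // budgeted, non-mipmapped texture proxy. 'context', 'srcProxy', and 'srcRect' are assumed to
    // be a valid GrRecordingContext, source proxy, and in-bounds rectangle, respectively.
    //
    //     sk_sp<GrTextureProxy> copy = GrSurfaceProxy::Copy(context, srcProxy, GrMipMapped::kNo,
    //                                                       srcRect, SkBackingFit::kExact,
    //                                                       SkBudgeted::kYes);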

    bool isWrapped_ForTesting() const;

    SkDEBUGCODE(void validate(GrContext_Base*) const;)

    // Provides access to functions that aren't part of the public API.
    inline GrSurfaceProxyPriv priv();
    inline const GrSurfaceProxyPriv priv() const;

    /**
     * Provides privileged access to select callers to be able to add a ref to a GrSurfaceProxy
     * with zero refs.
     */
    class FirstRefAccess;
    inline FirstRefAccess firstRefAccess();

    GrInternalSurfaceFlags testingOnly_getFlags() const;

protected:
    // Deferred version
    GrSurfaceProxy(const GrBackendFormat& format, const GrSurfaceDesc& desc,
                   GrSurfaceOrigin origin, SkBackingFit fit,
                   SkBudgeted budgeted, GrInternalSurfaceFlags surfaceFlags)
            : GrSurfaceProxy(nullptr, LazyInstantiationType::kSingleUse, format, desc, origin, fit,
                             budgeted, surfaceFlags) {
        // Note: this ctor pulls a new uniqueID from the same pool as the GrGpuResources
    }

    // Lazy-callback version
    GrSurfaceProxy(LazyInstantiateCallback&&, LazyInstantiationType,
                   const GrBackendFormat& format, const GrSurfaceDesc&, GrSurfaceOrigin,
                   SkBackingFit, SkBudgeted, GrInternalSurfaceFlags);

    // Wrapped version.
    GrSurfaceProxy(sk_sp<GrSurface>, GrSurfaceOrigin, SkBackingFit);

    virtual ~GrSurfaceProxy();

    friend class GrSurfaceProxyPriv;

    // Methods made available via GrSurfaceProxyPriv
    bool ignoredByResourceAllocator() const { return fIgnoredByResourceAllocator; }
    void setIgnoredByResourceAllocator() { fIgnoredByResourceAllocator = true; }

    int32_t getProxyRefCnt() const { return this->internalGetProxyRefCnt(); }
    int32_t getTotalRefs() const { return this->internalGetTotalRefs(); }

    void computeScratchKey(GrScratchKey*) const;

    virtual sk_sp<GrSurface> createSurface(GrResourceProvider*) const = 0;
    void assign(sk_sp<GrSurface> surface);

    sk_sp<GrSurface> createSurfaceImpl(GrResourceProvider*, int sampleCnt, bool needsStencil,
                                       GrSurfaceDescFlags, GrMipMapped) const;

    // Once the size of a fully-lazy proxy is decided, and before it gets instantiated, the client
    // can use this optional method to specify the proxy's size. (A proxy's size can be less than
    // the GPU surface that backs it. e.g., SkBackingFit::kApprox.) Otherwise, the proxy's size will
    // be set to match the underlying GPU surface upon instantiation.
    void setLazySize(int width, int height) {
        SkASSERT(GrSurfaceProxy::LazyState::kFully == this->lazyInstantiationState());
        SkASSERT(width > 0 && height > 0);
        fWidth = width;
        fHeight = height;
    }

    bool instantiateImpl(GrResourceProvider* resourceProvider, int sampleCnt, bool needsStencil,
                         GrSurfaceDescFlags descFlags, GrMipMapped, const GrUniqueKey*);

    // In many cases these flags aren't actually known until the proxy has been instantiated.
    // However, Ganesh frequently needs to change its behavior based on these settings. For
    // internally created proxies we will know these properties ahead of time. For wrapped
    // proxies we will copy the properties off of the GrSurface. For lazy proxies we force the
    // call sites to provide the required information ahead of time. At instantiation time
    // we verify that the assumed properties match the actual properties.
    GrInternalSurfaceFlags fSurfaceFlags;

private:
    // For wrapped resources, 'fFormat', 'fConfig', 'fWidth', 'fHeight', and 'fOrigin' will always
    // be filled in from the wrapped resource.
    GrBackendFormat        fFormat;
    GrPixelConfig          fConfig;
    int                    fWidth;
    int                    fHeight;
    GrSurfaceOrigin        fOrigin;
    SkBackingFit           fFit;      // always kApprox for lazy-callback resources
                                      // always kExact for wrapped resources
    mutable SkBudgeted     fBudgeted; // always kYes for lazy-callback resources
                                      // set from the backing resource for wrapped resources
                                      // mutable bc of SkSurface/SkImage wishy-washiness

    const UniqueID         fUniqueID; // set from the backing resource for wrapped resources

    LazyInstantiateCallback fLazyInstantiateCallback;
    // If this is set to kSingleUse, then after one call to fLazyInstantiateCallback we will clean
    // up the lazy callback and then delete it. This will allow for any refs and resources being
    // held by the standard function to be released. This is specifically useful in non-DDL cases
    // where we make lazy proxies and instantiate them immediately.
    // Note: This is ignored if fLazyInstantiateCallback is null.
    LazyInstantiationType fLazyInstantiationType;

    SkDEBUGCODE(void validateSurface(const GrSurface*);)
    SkDEBUGCODE(virtual void onValidateSurface(const GrSurface*) = 0;)

    static const size_t kInvalidGpuMemorySize = ~static_cast<size_t>(0);
    SkDEBUGCODE(size_t getRawGpuMemorySize_debugOnly() const { return fGpuMemorySize; })

    virtual size_t onUninstantiatedGpuMemorySize() const = 0;

    bool                   fNeedsClear;
    bool                   fIgnoredByResourceAllocator = false;

    // This entry is lazily evaluated so, when the proxy wraps a resource, the resource's
    // gpuMemorySize will be used but, when the proxy is deferred, it will compute the answer
    // itself. If the proxy computes its own answer that answer is checked (in debug mode) in
    // the instantiation method.
    mutable size_t         fGpuMemorySize;

    // The last opList that wrote to or is currently going to write to this surface.
    // The opList can be closed (e.g., no surface context is currently bound
    // to this proxy).
    // This back-pointer is required so that we can add a dependency between
    // the opList used to create the current contents of this surface
    // and the opList of a destination surface to which this one is being drawn or copied.
    // This pointer is unreffed. OpLists own a ref on their surface proxies.
    GrOpList* fLastOpList;

    typedef GrIORefProxy INHERITED;
};

class GrSurfaceProxy::FirstRefAccess {
private:
    void ref(GrResourceCache* cache) { fProxy->addInitialRef(cache); }

    FirstRefAccess(GrSurfaceProxy* proxy) : fProxy(proxy) {}

    // No taking addresses of this type.
    const FirstRefAccess* operator&() const = delete;
    FirstRefAccess* operator&() = delete;

    GrSurfaceProxy* fProxy;

    friend class GrSurfaceProxy;
    friend class GrProxyProvider;
    friend class GrDeinstantiateProxyTracker;
};

inline GrSurfaceProxy::FirstRefAccess GrSurfaceProxy::firstRefAccess() {
    return FirstRefAccess(this);
}

#endif