/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrOp_DEFINED
#define GrOp_DEFINED

#include "include/core/SkMatrix.h"
#include "include/core/SkRect.h"
#include "include/core/SkString.h"
#include "include/gpu/GrGpuResource.h"
#include "src/gpu/GrNonAtomicRef.h"
#include "src/gpu/GrTracing.h"
#include "src/gpu/GrXferProcessor.h"
#include <atomic>
#include <new>

class GrCaps;
class GrGpuCommandBuffer;
class GrOpFlushState;
class GrRenderTargetOpList;

/**
 * GrOp is the base class for all Ganesh deferred GPU operations. To facilitate reordering and to
 * minimize draw calls, Ganesh does not generate geometry inline with draw calls. Instead, it
 * captures the arguments to the draw and then generates the geometry when flushing. This gives GrOp
 * subclasses complete freedom to decide how/when to combine in order to produce fewer draw calls
 * and minimize state changes.
 *
 * Ops of the same subclass may be merged or chained using combineIfPossible. When two ops merge,
 * one takes on the union of the data and the other is left empty. The merged op becomes responsible
 * for drawing the data from both the original ops. When ops are chained, each op maintains its own
 * data but they are linked in a list and the head op becomes responsible for executing the work for
 * the chain.
 *
 * It is required that chainability is transitive. Moreover, if op A is able to merge with B then
 * it must be the case that any op that can chain with A will either merge or chain with any op
 * that can chain to B.
 *
 * The bounds of the op must contain all the vertices in device space *irrespective* of the clip.
 * The bounds are used in determining which clip elements must be applied and thus the bounds cannot
 * in turn depend upon the clip.
 */
#define GR_OP_SPEW 0
#if GR_OP_SPEW
    #define GrOP_SPEW(code) code
    #define GrOP_INFO(...) SkDebugf(__VA_ARGS__)
#else
    #define GrOP_SPEW(code)
    #define GrOP_INFO(...)
#endif

// Print out op information at flush time
#define GR_FLUSH_TIME_OP_SPEW 0

// A helper macro to generate a class static id
#define DEFINE_OP_CLASS_ID \
    static uint32_t ClassID() { \
        static uint32_t kClassID = GenOpClassID(); \
        return kClassID; \
    }
class GrOp : private SkNoncopyable {
public:
    virtual ~GrOp() = default;

    virtual const char* name() const = 0;

    using VisitProxyFunc = std::function<void(GrSurfaceProxy*, GrMipMapped)>;

    virtual void visitProxies(const VisitProxyFunc&) const {
        // This default implementation assumes the op has no proxies
    }

    enum class CombineResult {
        /**
         * The op that combineIfPossible was called on now represents its own work plus that of
         * the passed op. The passed op should be destroyed without being flushed. Currently it
         * is not legal to merge an op via combineIfPossible() if the passed op is already in a
         * chain (though the op on which combineIfPossible() was called may be).
         */
        kMerged,
        /**
         * The caller *may* (but is not required to) chain these ops together. If they are chained
         * then prepare() and execute() will be called on the head op but not the other ops in the
         * chain. The head op will prepare and execute on behalf of all the ops in the chain.
         */
        kMayChain,
        /**
         * The ops cannot be combined.
         */
        kCannotCombine
    };

    CombineResult combineIfPossible(GrOp* that, const GrCaps& caps);

    const SkRect& bounds() const {
        SkASSERT(kUninitialized_BoundsFlag != fBoundsFlags);
        return fBounds;
    }

    void setClippedBounds(const SkRect& clippedBounds) {
        fBounds = clippedBounds;
        // The clipped bounds already incorporate any effect of the bounds flags.
        fBoundsFlags = 0;
    }

    bool hasAABloat() const {
        SkASSERT(fBoundsFlags != kUninitialized_BoundsFlag);
        return SkToBool(fBoundsFlags & kAABloat_BoundsFlag);
    }

    bool hasZeroArea() const {
        SkASSERT(fBoundsFlags != kUninitialized_BoundsFlag);
        return SkToBool(fBoundsFlags & kZeroArea_BoundsFlag);
    }

#ifdef SK_DEBUG
    // All GrOp-derived classes should be allocated in and deleted from a GrMemoryPool
    void* operator new(size_t size);
    void operator delete(void* target);

    void* operator new(size_t size, void* placement) {
        return ::operator new(size, placement);
    }
    void operator delete(void* target, void* placement) {
        ::operator delete(target, placement);
    }
#endif

    /**
     * Helper for safely down-casting to a GrOp subclass
     */
    template <typename T> const T& cast() const {
        SkASSERT(T::ClassID() == this->classID());
        return *static_cast<const T*>(this);
    }

    template <typename T> T* cast() {
        SkASSERT(T::ClassID() == this->classID());
        return static_cast<T*>(this);
    }

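    // Illustrative only: cast<>() asserts that T::ClassID() matches this op's classID() (declared
    // just below), so callers typically compare class IDs before downcasting, e.g.:
    //
    //     if (op->classID() == MySubclassOp::ClassID()) {   // MySubclassOp is a hypothetical name
    //         MySubclassOp* concrete = op->cast<MySubclassOp>();
    //     }
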
    uint32_t classID() const { SkASSERT(kIllegalOpID != fClassID); return fClassID; }

    // We lazily initialize the uniqueID because currently the only user is GrAuditTrail
    uint32_t uniqueID() const {
        if (kIllegalOpID == fUniqueID) {
            fUniqueID = GenOpID();
        }
        return fUniqueID;
    }

    /**
     * Called prior to executing. The op should perform any resource creation or data transfers
     * necessary before execute() is called.
     */
    void prepare(GrOpFlushState* state) { this->onPrepare(state); }

    /** Issues the op's commands to GrGpu. */
    void execute(GrOpFlushState* state, const SkRect& chainBounds) {
        TRACE_EVENT0("skia", name());
        this->onExecute(state, chainBounds);
    }
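
    // Rough flush-time sequence, sketched here for orientation only; the real driving logic lives
    // in the op list / GrOpFlushState and also handles chains, inline uploads, and command
    // buffers. 'opsInList' is an invented name:
    //
    //     for (GrOp* op : opsInList) { op->prepare(flushState); }                // build GPU data
    //     for (GrOp* op : opsInList) { op->execute(flushState, op->bounds()); }  // record draws
    //
    // For a chained op, only the head is prepared/executed, and chainBounds is the union of the
    // chain's bounds rather than op->bounds().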

    /** Used for spewing information about ops when debugging. */
#ifdef SK_DEBUG
    virtual SkString dumpInfo() const {
        SkString string;
        string.appendf("OpBounds: [L: %.2f, T: %.2f, R: %.2f, B: %.2f]\n",
                       fBounds.fLeft, fBounds.fTop, fBounds.fRight, fBounds.fBottom);
        return string;
    }
#else
    SkString dumpInfo() const { return SkString("<Op information unavailable>"); }
#endif

    /**
     * A helper for iterating over an op chain in a range for loop that also downcasts to a GrOp
     * subclass. E.g.:
     *     for (MyOpSubClass& op : ChainRange<MyOpSubClass>(this)) {
     *         // ...
     *     }
     */
    template <typename OpSubclass = GrOp> class ChainRange {
    private:
        class Iter {
        public:
            explicit Iter(const OpSubclass* head) : fCurr(head) {}
            inline Iter& operator++() {
                return *this = Iter(static_cast<const OpSubclass*>(fCurr->nextInChain()));
            }
            const OpSubclass& operator*() const { return *fCurr; }
            bool operator!=(const Iter& that) const { return fCurr != that.fCurr; }

        private:
            const OpSubclass* fCurr;
        };
        const OpSubclass* fHead;

    public:
        explicit ChainRange(const OpSubclass* head) : fHead(head) {}
        Iter begin() { return Iter(fHead); }
        Iter end() { return Iter(nullptr); }
    };

    /**
     * Concatenates two op chains. This op must be a tail and the passed op must be a head. The ops
     * must be of the same subclass.
     */
    void chainConcat(std::unique_ptr<GrOp>);
    /** Returns true if this is the head of a chain (including a length 1 chain). */
    bool isChainHead() const { return !fPrevInChain; }
    /** Returns true if this is the tail of a chain (including a length 1 chain). */
    bool isChainTail() const { return !fNextInChain; }
    /** The next op in the chain. */
    GrOp* nextInChain() const { return fNextInChain.get(); }
    /** The previous op in the chain. */
    GrOp* prevInChain() const { return fPrevInChain; }
    /**
     * Cuts the chain after this op. The returned op is the op that was previously next in the
     * chain or null if this was already a tail.
     */
    std::unique_ptr<GrOp> cutChain();
    SkDEBUGCODE(void validateChain(GrOp* expectedTail = nullptr) const);
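
    // Sketch of chain manipulation (illustrative; the actual callers live in the op list code, and
    // 'ownedHead', 'tail', and 'op' are invented names):
    //
    //     if (tail->isChainTail() && ownedHead->isChainHead()) {
    //         tail->chainConcat(std::move(ownedHead));   // splice the second chain onto the first
    //     }
    //     std::unique_ptr<GrOp> rest = op->cutChain();   // what was next in 'op's chain, or null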

#ifdef SK_DEBUG
    virtual void validate() const {}
#endif

protected:
    GrOp(uint32_t classID);

    /**
     * Indicates that the op will produce geometry that extends beyond its bounds for the
     * purpose of ensuring that the fragment shader runs on partially covered pixels for
     * non-MSAA antialiasing.
     */
    enum class HasAABloat : bool {
        kNo = false,
        kYes = true
    };
    /**
     * Indicates that the geometry represented by the op has zero area (e.g. it is hairline or
     * points).
     */
    enum class IsZeroArea : bool {
        kNo = false,
        kYes = true
    };

    void setBounds(const SkRect& newBounds, HasAABloat aabloat, IsZeroArea zeroArea) {
        fBounds = newBounds;
        this->setBoundsFlags(aabloat, zeroArea);
    }
    void setTransformedBounds(const SkRect& srcBounds, const SkMatrix& m,
                              HasAABloat aabloat, IsZeroArea zeroArea) {
        m.mapRect(&fBounds, srcBounds);
        this->setBoundsFlags(aabloat, zeroArea);
    }
    void makeFullScreen(GrSurfaceProxy* proxy) {
        this->setBounds(SkRect::MakeIWH(proxy->width(), proxy->height()),
                        HasAABloat::kNo, IsZeroArea::kNo);
    }
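
    // Illustrative sketch (MyQuadOp is an invented name): a subclass constructor would typically
    // map its local geometry into device space when establishing bounds, e.g.:
    //
    //     MyQuadOp::MyQuadOp(const SkMatrix& viewMatrix, const SkRect& rect, bool coverageAA)
    //             : GrOp(ClassID()) {
    //         this->setTransformedBounds(rect, viewMatrix,
    //                                    coverageAA ? HasAABloat::kYes : HasAABloat::kNo,
    //                                    IsZeroArea::kNo);
    //     }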

    static uint32_t GenOpClassID() { return GenID(&gCurrOpClassID); }

private:
    void joinBounds(const GrOp& that) {
        if (that.hasAABloat()) {
            fBoundsFlags |= kAABloat_BoundsFlag;
        }
        if (that.hasZeroArea()) {
            fBoundsFlags |= kZeroArea_BoundsFlag;
        }
        return fBounds.joinPossiblyEmptyRect(that.fBounds);
    }

    virtual CombineResult onCombineIfPossible(GrOp*, const GrCaps&) {
        return CombineResult::kCannotCombine;
    }

    virtual void onPrepare(GrOpFlushState*) = 0;
    // If this op is chained then chainBounds is the union of the bounds of all ops in the chain.
    // Otherwise, this op's bounds.
    virtual void onExecute(GrOpFlushState*, const SkRect& chainBounds) = 0;

    static uint32_t GenID(std::atomic<uint32_t>* idCounter) {
        uint32_t id = (*idCounter)++;
        if (id == 0) {
            SK_ABORT("This should never wrap as it should only be called once for each GrOp "
                     "subclass.");
        }
        return id;
    }

    void setBoundsFlags(HasAABloat aabloat, IsZeroArea zeroArea) {
        fBoundsFlags = 0;
        fBoundsFlags |= (HasAABloat::kYes == aabloat) ? kAABloat_BoundsFlag : 0;
        fBoundsFlags |= (IsZeroArea ::kYes == zeroArea) ? kZeroArea_BoundsFlag : 0;
    }

    enum {
        kIllegalOpID = 0,
    };

    enum BoundsFlags {
        kAABloat_BoundsFlag = 0x1,
        kZeroArea_BoundsFlag = 0x2,
        SkDEBUGCODE(kUninitialized_BoundsFlag = 0x4)
    };

    std::unique_ptr<GrOp> fNextInChain;
    GrOp* fPrevInChain = nullptr;
    const uint16_t fClassID;
    uint16_t fBoundsFlags;

    static uint32_t GenOpID() { return GenID(&gCurrOpUniqueID); }
    mutable uint32_t fUniqueID = SK_InvalidUniqueID;
    SkRect fBounds;

    static std::atomic<uint32_t> gCurrOpUniqueID;
    static std::atomic<uint32_t> gCurrOpClassID;
};

#endif