joshualitt | 4d8da81 | 2015-01-28 12:53:54 -0800 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2015 Google Inc. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license that can be |
| 5 | * found in the LICENSE file. |
| 6 | */ |
| 7 | |
| 8 | #ifndef GrBatch_DEFINED |
| 9 | #define GrBatch_DEFINED |
| 10 | |
| 11 | #include <new> |
joshualitt | dbe1e6f | 2015-07-16 08:12:45 -0700 | [diff] [blame] | 12 | #include "GrNonAtomicRef.h" |
joshualitt | 4d8da81 | 2015-01-28 12:53:54 -0800 | [diff] [blame] | 13 | |
bsalomon | 16b9913 | 2015-08-13 14:55:50 -0700 | [diff] [blame] | 14 | #include "SkRect.h" |
bsalomon | 5346983 | 2015-08-18 09:20:09 -0700 | [diff] [blame] | 15 | #include "SkString.h" |
joshualitt | 4d8da81 | 2015-01-28 12:53:54 -0800 | [diff] [blame] | 16 | |
bsalomon | 16b9913 | 2015-08-13 14:55:50 -0700 | [diff] [blame] | 17 | class GrCaps; |
bsalomon | 5346983 | 2015-08-18 09:20:09 -0700 | [diff] [blame] | 18 | class GrBatchFlushState; |
joshualitt | 4d8da81 | 2015-01-28 12:53:54 -0800 | [diff] [blame] | 19 | |
/**
 * GrBatch is the base class for all Ganesh deferred geometry generators. To facilitate
 * reorderable batching, Ganesh does not generate geometry inline with draw calls. Instead, it
 * captures the arguments to the draw and then generates the geometry on demand. This gives GrBatch
 * subclasses complete freedom to decide how / what they can batch.
 *
 * Batches are created when GrContext processes a draw call. Batches of the same subclass may be
 * merged using combineIfPossible. When two batches merge, one takes on the union of the data
 * and the other is left empty. The merged batch becomes responsible for drawing the data from both
 * of the original batches.
 *
 * If there are any possible optimizations which might require knowing more about the full state of
 * the draw, i.e. whether or not the GrBatch is allowed to tweak alpha for coverage, then this
 * information will be communicated to the GrBatch prior to geometry generation.
 *
 * The bounds of the batch must contain all the vertices in device space *irrespective* of the clip.
 * The bounds are used in determining which clip elements must be applied and thus the bounds cannot
 * in turn depend upon the clip.
 */
// Compile-time toggle for batch debug spew. When GR_BATCH_SPEW is non-zero,
// GrBATCH_INFO(...) forwards to SkDebugf and GrBATCH_SPEW(code) emits its
// argument verbatim; when zero, both expand to nothing so spew code carries
// no runtime or size cost in normal builds.
#define GR_BATCH_SPEW 0
#if GR_BATCH_SPEW
    #define GrBATCH_INFO(...) SkDebugf(__VA_ARGS__)
    #define GrBATCH_SPEW(code) code
#else
    #define GrBATCH_SPEW(code)
    #define GrBATCH_INFO(...)
#endif
joshualitt | 4d8da81 | 2015-01-28 12:53:54 -0800 | [diff] [blame] | 47 | |
// A helper macro to generate a class static id. Each GrBatch subclass places this
// in its declaration to get a ClassID() accessor; the id is lazily allocated on
// first call (function-local static) and is unique across all subclasses. It backs
// the cheap type checks in GrBatch::classID(), cast<>() and combineIfPossible().
#define DEFINE_BATCH_CLASS_ID \
    static uint32_t ClassID() { \
        static uint32_t kClassID = GenBatchClassID(); \
        return kClassID; \
    }
| 54 | |
joshualitt | dbe1e6f | 2015-07-16 08:12:45 -0700 | [diff] [blame] | 55 | class GrBatch : public GrNonAtomicRef { |
joshualitt | 4d8da81 | 2015-01-28 12:53:54 -0800 | [diff] [blame] | 56 | public: |
reed | 1b55a96 | 2015-09-17 20:16:13 -0700 | [diff] [blame] | 57 | GrBatch(uint32_t classID); |
bsalomon | a387a11 | 2015-08-11 14:47:42 -0700 | [diff] [blame] | 58 | ~GrBatch() override; |
joshualitt | 4d8da81 | 2015-01-28 12:53:54 -0800 | [diff] [blame] | 59 | |
| 60 | virtual const char* name() const = 0; |
joshualitt | 4d8da81 | 2015-01-28 12:53:54 -0800 | [diff] [blame] | 61 | |
bsalomon | cb02b38 | 2015-08-12 11:14:50 -0700 | [diff] [blame] | 62 | bool combineIfPossible(GrBatch* that, const GrCaps& caps) { |
joshualitt | 4d8da81 | 2015-01-28 12:53:54 -0800 | [diff] [blame] | 63 | if (this->classID() != that->classID()) { |
| 64 | return false; |
| 65 | } |
| 66 | |
bsalomon | cb02b38 | 2015-08-12 11:14:50 -0700 | [diff] [blame] | 67 | return this->onCombineIfPossible(that, caps); |
joshualitt | 4d8da81 | 2015-01-28 12:53:54 -0800 | [diff] [blame] | 68 | } |
| 69 | |
joshualitt | 99c7c07 | 2015-05-01 13:43:30 -0700 | [diff] [blame] | 70 | const SkRect& bounds() const { return fBounds; } |
| 71 | |
joshualitt | 4d8da81 | 2015-01-28 12:53:54 -0800 | [diff] [blame] | 72 | void* operator new(size_t size); |
| 73 | void operator delete(void* target); |
| 74 | |
| 75 | void* operator new(size_t size, void* placement) { |
| 76 | return ::operator new(size, placement); |
| 77 | } |
| 78 | void operator delete(void* target, void* placement) { |
| 79 | ::operator delete(target, placement); |
| 80 | } |
| 81 | |
| 82 | /** |
reed | 1b55a96 | 2015-09-17 20:16:13 -0700 | [diff] [blame] | 83 | * Helper for safely down-casting to a GrBatch subclass |
bsalomon | abd30f5 | 2015-08-13 13:34:48 -0700 | [diff] [blame] | 84 | */ |
reed | 1b55a96 | 2015-09-17 20:16:13 -0700 | [diff] [blame] | 85 | template <typename T> const T& cast() const { |
| 86 | SkASSERT(T::ClassID() == this->classID()); |
| 87 | return *static_cast<const T*>(this); |
| 88 | } |
| 89 | |
| 90 | template <typename T> T* cast() { |
| 91 | SkASSERT(T::ClassID() == this->classID()); |
| 92 | return static_cast<T*>(this); |
| 93 | } |
joshualitt | 4d8da81 | 2015-01-28 12:53:54 -0800 | [diff] [blame] | 94 | |
joshualitt | ca1f07e | 2015-08-07 08:11:19 -0700 | [diff] [blame] | 95 | uint32_t classID() const { SkASSERT(kIllegalBatchID != fClassID); return fClassID; } |
joshualitt | 4d8da81 | 2015-01-28 12:53:54 -0800 | [diff] [blame] | 96 | |
joshualitt | ca1f07e | 2015-08-07 08:11:19 -0700 | [diff] [blame] | 97 | #if GR_BATCH_SPEW |
| 98 | uint32_t uniqueID() const { return fUniqueID; } |
| 99 | #endif |
bsalomon | abd30f5 | 2015-08-13 13:34:48 -0700 | [diff] [blame] | 100 | SkDEBUGCODE(bool isUsed() const { return fUsed; }) |
joshualitt | ca1f07e | 2015-08-07 08:11:19 -0700 | [diff] [blame] | 101 | |
bsalomon | 5346983 | 2015-08-18 09:20:09 -0700 | [diff] [blame] | 102 | /** Called prior to drawing. The batch should perform any resource creation necessary to |
| 103 | to quickly issue its draw when draw is called. */ |
| 104 | void prepare(GrBatchFlushState* state) { this->onPrepare(state); } |
| 105 | |
| 106 | /** Issues the batches commands to GrGpu. */ |
| 107 | void draw(GrBatchFlushState* state) { this->onDraw(state); } |
| 108 | |
| 109 | /** Used to block batching across render target changes. Remove this once we store |
| 110 | GrBatches for different RTs in different targets. */ |
| 111 | virtual uint32_t renderTargetUniqueID() const = 0; |
| 112 | |
| 113 | /** Used for spewing information about batches when debugging. */ |
| 114 | virtual SkString dumpInfo() const = 0; |
| 115 | |
joshualitt | 4d8da81 | 2015-01-28 12:53:54 -0800 | [diff] [blame] | 116 | protected: |
joshualitt | 99c7c07 | 2015-05-01 13:43:30 -0700 | [diff] [blame] | 117 | // NOTE, compute some bounds, even if extremely conservative. Do *NOT* setLargest on the bounds |
| 118 | // rect because we outset it for dst copy textures |
| 119 | void setBounds(const SkRect& newBounds) { fBounds = newBounds; } |
| 120 | |
| 121 | void joinBounds(const SkRect& otherBounds) { |
| 122 | return fBounds.joinPossiblyEmptyRect(otherBounds); |
| 123 | } |
| 124 | |
reed | 1b55a96 | 2015-09-17 20:16:13 -0700 | [diff] [blame] | 125 | static uint32_t GenBatchClassID() { return GenID(&gCurrBatchClassID); } |
| 126 | |
bsalomon | abd30f5 | 2015-08-13 13:34:48 -0700 | [diff] [blame] | 127 | SkRect fBounds; |
| 128 | |
| 129 | private: |
| 130 | virtual bool onCombineIfPossible(GrBatch*, const GrCaps& caps) = 0; |
| 131 | |
bsalomon | 5346983 | 2015-08-18 09:20:09 -0700 | [diff] [blame] | 132 | virtual void onPrepare(GrBatchFlushState*) = 0; |
| 133 | virtual void onDraw(GrBatchFlushState*) = 0; |
| 134 | |
bsalomon | abd30f5 | 2015-08-13 13:34:48 -0700 | [diff] [blame] | 135 | static uint32_t GenID(int32_t* idCounter) { |
reed | 1b55a96 | 2015-09-17 20:16:13 -0700 | [diff] [blame] | 136 | // The atomic inc returns the old value not the incremented value. So we add |
bsalomon | abd30f5 | 2015-08-13 13:34:48 -0700 | [diff] [blame] | 137 | // 1 to the returned value. |
| 138 | uint32_t id = static_cast<uint32_t>(sk_atomic_inc(idCounter)) + 1; |
| 139 | if (!id) { |
| 140 | SkFAIL("This should never wrap as it should only be called once for each GrBatch " |
| 141 | "subclass."); |
| 142 | } |
| 143 | return id; |
| 144 | } |
| 145 | |
| 146 | enum { |
| 147 | kIllegalBatchID = 0, |
| 148 | }; |
| 149 | |
bsalomon | abd30f5 | 2015-08-13 13:34:48 -0700 | [diff] [blame] | 150 | SkDEBUGCODE(bool fUsed;) |
reed | 1b55a96 | 2015-09-17 20:16:13 -0700 | [diff] [blame] | 151 | const uint32_t fClassID; |
bsalomon | abd30f5 | 2015-08-13 13:34:48 -0700 | [diff] [blame] | 152 | #if GR_BATCH_SPEW |
reed | 1b55a96 | 2015-09-17 20:16:13 -0700 | [diff] [blame] | 153 | static uint32_t GenBatchID() { return GenID(&gCurrBatchUniqueID); } |
| 154 | const uint32_t fUniqueID; |
| 155 | static int32_t gCurrBatchUniqueID; |
bsalomon | abd30f5 | 2015-08-13 13:34:48 -0700 | [diff] [blame] | 156 | #endif |
reed | 1b55a96 | 2015-09-17 20:16:13 -0700 | [diff] [blame] | 157 | static int32_t gCurrBatchClassID; |
bsalomon | abd30f5 | 2015-08-13 13:34:48 -0700 | [diff] [blame] | 158 | typedef GrNonAtomicRef INHERITED; |
| 159 | }; |
| 160 | |
joshualitt | 4d8da81 | 2015-01-28 12:53:54 -0800 | [diff] [blame] | 161 | #endif |