/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "OpenGLRenderer"
#define ATRACE_TAG ATRACE_TAG_VIEW

#include <SkCanvas.h>

#include <utils/Trace.h>
#include <ui/Rect.h>
#include <ui/Region.h>

#include "Caches.h"
#include "Debug.h"
#include "DeferredDisplayList.h"
#include "DisplayListOp.h"
#include "OpenGLRenderer.h"
#include "utils/MathUtils.h"

#if DEBUG_DEFER
    #define DEFER_LOGD(...) ALOGD(__VA_ARGS__)
#else
    #define DEFER_LOGD(...)
#endif

namespace android {
namespace uirenderer {

// Depth of the save stack at the beginning of batch playback at flush time
#define FLUSH_SAVE_STACK_DEPTH 2

#define DEBUG_COLOR_BARRIER          0x1f000000
#define DEBUG_COLOR_MERGEDBATCH      0x5f7f7fff
#define DEBUG_COLOR_MERGEDBATCH_SOLO 0x5f7fff7f

/////////////////////////////////////////////////////////////////////////////////
// Operation Batches
/////////////////////////////////////////////////////////////////////////////////

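/*
 * Overview of the batch types defined below (summary of this file, for orientation):
 *  - Batch:               abstract replayable unit in the deferred list
 *  - DrawBatch:           a group of draw ops sharing a batch id, replayed one op at a time
 *  - MergingDrawBatch:    a DrawBatch whose ops also share a merge id and compatible state,
 *                         typically replayed with a single multiDraw() call
 *  - StateOpBatch:        wraps a single state op (clip/save/saveLayer) used as a reorder barrier
 *  - RestoreToCountBatch: wraps a restoreToCount() performed at flush time
 */
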
class Batch {
public:
    virtual void replay(OpenGLRenderer& renderer, Rect& dirty, int index) = 0;
    virtual ~Batch() {}
    virtual bool purelyDrawBatch() { return false; }
    virtual bool coversBounds(const Rect& bounds) { return false; }
};

class DrawBatch : public Batch {
public:
    DrawBatch(const DeferInfo& deferInfo) : mAllOpsOpaque(true),
            mBatchId(deferInfo.batchId), mMergeId(deferInfo.mergeId) {
        mOps.clear();
    }

    virtual ~DrawBatch() { mOps.clear(); }

    virtual void add(DrawOp* op, const DeferredDisplayState* state, bool opaqueOverBounds) {
        // NOTE: ignore empty bounds special case, since we don't merge across those ops
        mBounds.unionWith(state->mBounds);
        mAllOpsOpaque &= opaqueOverBounds;
        mOps.add(OpStatePair(op, state));
    }

    bool intersects(const Rect& rect) {
        if (!rect.intersects(mBounds)) return false;

        for (unsigned int i = 0; i < mOps.size(); i++) {
            if (rect.intersects(mOps[i].state->mBounds)) {
#if DEBUG_DEFER
                DEFER_LOGD("op intersects with op %p with bounds %f %f %f %f:", mOps[i].op,
                        mOps[i].state->mBounds.left, mOps[i].state->mBounds.top,
                        mOps[i].state->mBounds.right, mOps[i].state->mBounds.bottom);
                mOps[i].op->output(2);
#endif
                return true;
            }
        }
        return false;
    }

    virtual void replay(OpenGLRenderer& renderer, Rect& dirty, int index) override {
        DEFER_LOGD("%d replaying DrawBatch %p, with %d ops (batch id %x, merge id %p)",
                index, this, mOps.size(), getBatchId(), getMergeId());

        for (unsigned int i = 0; i < mOps.size(); i++) {
            DrawOp* op = mOps[i].op;
            const DeferredDisplayState* state = mOps[i].state;
            renderer.restoreDisplayState(*state);

#if DEBUG_DISPLAY_LIST_OPS_AS_EVENTS
            renderer.eventMark(op->name());
#endif
            op->applyDraw(renderer, dirty);

#if DEBUG_MERGE_BEHAVIOR
            const Rect& bounds = state->mBounds;
            int batchColor = 0x1f000000;
            if (getBatchId() & 0x1) batchColor |= 0x0000ff;
            if (getBatchId() & 0x2) batchColor |= 0x00ff00;
            if (getBatchId() & 0x4) batchColor |= 0xff0000;
            renderer.drawScreenSpaceColorRect(bounds.left, bounds.top, bounds.right, bounds.bottom,
                    batchColor);
#endif
        }
    }

    virtual bool purelyDrawBatch() override { return true; }

    virtual bool coversBounds(const Rect& bounds) override {
        if (CC_LIKELY(!mAllOpsOpaque || !mBounds.contains(bounds) || count() == 1)) return false;

        Region uncovered(android::Rect(bounds.left, bounds.top, bounds.right, bounds.bottom));
        for (unsigned int i = 0; i < mOps.size(); i++) {
            const Rect& r = mOps[i].state->mBounds;
            uncovered.subtractSelf(android::Rect(r.left, r.top, r.right, r.bottom));
        }
        return uncovered.isEmpty();
    }

    inline int getBatchId() const { return mBatchId; }
    inline mergeid_t getMergeId() const { return mMergeId; }
    inline int count() const { return mOps.size(); }

protected:
    Vector<OpStatePair> mOps;
    Rect mBounds; // union of bounds of contained ops
private:
    bool mAllOpsOpaque;
    int mBatchId;
    mergeid_t mMergeId;
};

class MergingDrawBatch : public DrawBatch {
public:
    MergingDrawBatch(DeferInfo& deferInfo, int width, int height) :
            DrawBatch(deferInfo), mClipRect(width, height),
            mClipSideFlags(kClipSide_None) {}

    /*
     * Helper for determining if a new op can merge with a MergingDrawBatch based on their bounds
     * and clip side flags. The boundsDelta passed in is computed so that a positive value means
     * the new op's bounds extend past the batch's bounds on that side (and a negative value means
     * the batch's bounds extend past the new op's).
     */
    static inline bool checkSide(const int currentFlags, const int newFlags, const int side,
            float boundsDelta) {
        bool currentClipExists = currentFlags & side;
        bool newClipExists = newFlags & side;

        // if current is clipped, we must be able to fit new bounds in current
        if (boundsDelta > 0 && currentClipExists) return false;

        // if new is clipped, we must be able to fit current bounds in new
        if (boundsDelta < 0 && newClipExists) return false;

        return true;
    }
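
    /*
     * Illustrative example (numbers are hypothetical, not exercised by this file): suppose the
     * batch is clipped on the left with mBounds.left == 10, and a new op arrives with
     * opBounds.left == 0. For kClipSide_Left the caller computes
     * boundsDelta = mBounds.left - opBounds.left = 10, so boundsDelta > 0 and currentClipExists
     * is true: the op would spill past the batch's left clip, and checkSide() rejects the merge.
     * With opBounds.left == 15 instead, boundsDelta is negative, and the merge is only rejected
     * if the *new* op is itself clipped on its left side.
     */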

    /*
     * Checks if a (mergeable) op can be merged into this batch
     *
     * If true, the op's multiDraw must be guaranteed to handle both ops simultaneously, so it is
     * important to consider all paint attributes used in the draw calls in deciding both a) if an
     * op tries to merge at all, and b) if the op can merge with another set of ops
     *
     * False positives can lead to information from the paints of subsequent merged operations being
     * dropped, so we make simplifying qualifications on the ops that can merge, per op type.
     */
    bool canMergeWith(const DrawOp* op, const DeferredDisplayState* state) {
        bool isTextBatch = getBatchId() == DeferredDisplayList::kOpBatch_Text ||
                getBatchId() == DeferredDisplayList::kOpBatch_ColorText;

        // Overlapping other operations is only allowed for text without shadow. For other ops,
        // multiDraw isn't guaranteed to overdraw correctly
        if (!isTextBatch || op->hasTextShadow()) {
            if (intersects(state->mBounds)) return false;
        }
        const DeferredDisplayState* lhs = state;
        const DeferredDisplayState* rhs = mOps[0].state;

        if (!MathUtils::areEqual(lhs->mAlpha, rhs->mAlpha)) return false;

        // Identical round rect clip state means both ops will clip in the same way, or not at all.
        // As the state objects are const, we can compare their pointers to determine mergeability
        if (lhs->mRoundRectClipState != rhs->mRoundRectClipState) return false;

        /* Clipping compatibility check
         *
         * Exploits the fact that if an op or batch is clipped on a side, its bounds will equal its
         * clip for that side.
         */
        const int currentFlags = mClipSideFlags;
        const int newFlags = state->mClipSideFlags;
        if (currentFlags != kClipSide_None || newFlags != kClipSide_None) {
            const Rect& opBounds = state->mBounds;
            float boundsDelta = mBounds.left - opBounds.left;
            if (!checkSide(currentFlags, newFlags, kClipSide_Left, boundsDelta)) return false;
            boundsDelta = mBounds.top - opBounds.top;
            if (!checkSide(currentFlags, newFlags, kClipSide_Top, boundsDelta)) return false;

            // right and bottom delta calculation reversed to account for direction
            boundsDelta = opBounds.right - mBounds.right;
            if (!checkSide(currentFlags, newFlags, kClipSide_Right, boundsDelta)) return false;
            boundsDelta = opBounds.bottom - mBounds.bottom;
            if (!checkSide(currentFlags, newFlags, kClipSide_Bottom, boundsDelta)) return false;
        }

        // if paints are equal, then modifiers + paint attribs don't need to be compared
        if (op->mPaint == mOps[0].op->mPaint) return true;

        if (op->getPaintAlpha() != mOps[0].op->getPaintAlpha()) return false;

        if (op->mPaint && mOps[0].op->mPaint &&
                op->mPaint->getColorFilter() != mOps[0].op->mPaint->getColorFilter()) {
            return false;
        }

        if (op->mPaint && mOps[0].op->mPaint &&
                op->mPaint->getShader() != mOps[0].op->mPaint->getShader()) {
            return false;
        }

        return true;
    }

    virtual void add(DrawOp* op, const DeferredDisplayState* state,
            bool opaqueOverBounds) override {
        DrawBatch::add(op, state, opaqueOverBounds);

        const int newClipSideFlags = state->mClipSideFlags;
        mClipSideFlags |= newClipSideFlags;
        if (newClipSideFlags & kClipSide_Left) mClipRect.left = state->mClip.left;
        if (newClipSideFlags & kClipSide_Top) mClipRect.top = state->mClip.top;
        if (newClipSideFlags & kClipSide_Right) mClipRect.right = state->mClip.right;
        if (newClipSideFlags & kClipSide_Bottom) mClipRect.bottom = state->mClip.bottom;
    }

    virtual void replay(OpenGLRenderer& renderer, Rect& dirty, int index) override {
        DEFER_LOGD("%d replaying MergingDrawBatch %p, with %d ops,"
                " clip flags %x (batch id %x, merge id %p)",
                index, this, mOps.size(), mClipSideFlags, getBatchId(), getMergeId());
        if (mOps.size() == 1) {
            DrawBatch::replay(renderer, dirty, -1);
            return;
        }

        // clipping in the merged case is done ahead of time since all ops share the clip (if any)
        renderer.setupMergedMultiDraw(mClipSideFlags ? &mClipRect : nullptr);

        DrawOp* op = mOps[0].op;
#if DEBUG_DISPLAY_LIST_OPS_AS_EVENTS
        renderer.eventMark("multiDraw");
        renderer.eventMark(op->name());
#endif
        op->multiDraw(renderer, dirty, mOps, mBounds);

#if DEBUG_MERGE_BEHAVIOR
        renderer.drawScreenSpaceColorRect(mBounds.left, mBounds.top, mBounds.right, mBounds.bottom,
                DEBUG_COLOR_MERGEDBATCH);
#endif
    }

private:
    /*
     * Contains the effective clip rect shared by all merged ops. Initialized to the layer viewport,
     * it will shrink if an op must be clipped on a certain side. The clipped sides are reflected in
     * mClipSideFlags.
     */
    Rect mClipRect;
    int mClipSideFlags;
};

class StateOpBatch : public Batch {
public:
    // creates a single operation batch
    StateOpBatch(const StateOp* op, const DeferredDisplayState* state) : mOp(op), mState(state) {}

    virtual void replay(OpenGLRenderer& renderer, Rect& dirty, int index) override {
        DEFER_LOGD("replaying state op batch %p", this);
        renderer.restoreDisplayState(*mState);

        // use invalid save count because it won't be used at flush time - RestoreToCountOp is the
        // only one to use it, and we don't use that class at flush time, instead calling
        // renderer.restoreToCount directly
        int saveCount = -1;
        mOp->applyState(renderer, saveCount);
    }

private:
    const StateOp* mOp;
    const DeferredDisplayState* mState;
};

class RestoreToCountBatch : public Batch {
public:
    RestoreToCountBatch(const StateOp* op, const DeferredDisplayState* state, int restoreCount) :
            mState(state), mRestoreCount(restoreCount) {}

    virtual void replay(OpenGLRenderer& renderer, Rect& dirty, int index) override {
        DEFER_LOGD("batch %p restoring to count %d", this, mRestoreCount);

        renderer.restoreDisplayState(*mState);
        renderer.restoreToCount(mRestoreCount);
    }

private:
    // we use the state storage for the RestoreToCountOp, but don't replay the op itself
    const DeferredDisplayState* mState;

    /*
     * The count used here represents the flush() time saveCount. This is as opposed to the
     * DisplayList record time, or defer() time values (which are RestoreToCountOp's mCount, and
     * (saveCount + mCount) respectively). Since the count is different from the original
     * RestoreToCountOp, we don't store a pointer to the op, as elsewhere.
     */
    const int mRestoreCount;
};

#if DEBUG_MERGE_BEHAVIOR
class BarrierDebugBatch : public Batch {
    virtual void replay(OpenGLRenderer& renderer, Rect& dirty, int index) {
        renderer.drawScreenSpaceColorRect(0, 0, 10000, 10000, DEBUG_COLOR_BARRIER);
    }
};
#endif

/////////////////////////////////////////////////////////////////////////////////
// DeferredDisplayList
/////////////////////////////////////////////////////////////////////////////////

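// Resets the per-batch-id lookup tables so that subsequent ops start fresh batches, and pins
// mEarliestBatchIndex so later ops can't be reordered before this point. Called after reorder
// barriers (state ops, restoreToCount) and whenever batching must restart, e.g. after an op with
// unknown bounds or an overdraw-avoidance discard.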
void DeferredDisplayList::resetBatchingState() {
    for (int i = 0; i < kOpBatch_Count; i++) {
        mBatchLookup[i] = nullptr;
        mMergingBatches[i].clear();
    }
#if DEBUG_MERGE_BEHAVIOR
    if (mBatches.size() != 0) {
        mBatches.add(new BarrierDebugBatch());
    }
#endif
    mEarliestBatchIndex = mBatches.size();
}

void DeferredDisplayList::clear() {
    resetBatchingState();
    mComplexClipStackStart = -1;

    for (unsigned int i = 0; i < mBatches.size(); i++) {
        delete mBatches[i];
    }
    mBatches.clear();
    mSaveStack.clear();
    mEarliestBatchIndex = 0;
    mEarliestUnclearedIndex = 0;
}

/////////////////////////////////////////////////////////////////////////////////
// Operation adding
/////////////////////////////////////////////////////////////////////////////////

int DeferredDisplayList::getStateOpDeferFlags() const {
    // For both clipOp and save(Layer)Op, we don't want to save drawing info, and only want to save
    // the clip if we aren't recording a complex clip (and can thus trust it to be a rect)
    return recordingComplexClip() ? 0 : kStateDeferFlag_Clip;
}

int DeferredDisplayList::getDrawOpDeferFlags() const {
    return kStateDeferFlag_Draw | getStateOpDeferFlags();
}

/**
 * When a clipping operation occurs that could cause a complex clip, record the operation and all
 * subsequent clipOps, save/restores (if the clip flag is set). During a flush, instead of loading
 * the clip from deferred state, we play back all of the relevant state operations that generated
 * the complex clip.
 *
 * Note that we don't need to record the associated restore operation, since operations at defer
 * time record whether they should store the renderer's current clip
 */
void DeferredDisplayList::addClip(OpenGLRenderer& renderer, ClipOp* op) {
    if (recordingComplexClip() || op->canCauseComplexClip() || !renderer.hasRectToRectTransform()) {
        DEFER_LOGD("%p Received complex clip operation %p", this, op);

        // NOTE: defer clip op before setting mComplexClipStackStart so previous clip is recorded
        storeStateOpBarrier(renderer, op);

        if (!recordingComplexClip()) {
            mComplexClipStackStart = renderer.getSaveCount() - 1;
            DEFER_LOGD("    Starting complex clip region, start is %d", mComplexClipStackStart);
        }
    }
}

/**
 * For now, we record save layer operations as barriers in the batch list, preventing drawing
 * operations from reordering around the saveLayer and its associated restore()
 *
 * In the future, we should send saveLayer commands (if they can be played out of order) and their
 * contained drawing operations to a separate list of batches, so that they may draw at the
 * beginning of the frame. This would avoid targeting and removing an FBO in the middle of a frame.
 *
 * saveLayer operations should be pulled to the beginning of the frame if the canvas doesn't have a
 * complex clip, and if the flags (kClip_SaveFlag & kClipToLayer_SaveFlag) are set.
 */
void DeferredDisplayList::addSaveLayer(OpenGLRenderer& renderer,
        SaveLayerOp* op, int newSaveCount) {
    DEFER_LOGD("%p adding saveLayerOp %p, flags %x, new count %d",
            this, op, op->getFlags(), newSaveCount);

    storeStateOpBarrier(renderer, op);
    mSaveStack.push(newSaveCount);
}

/**
 * Takes a save op and its return value - the new save count - and stores it into the stream as a
 * barrier if it's needed to properly modify a complex clip
 */
void DeferredDisplayList::addSave(OpenGLRenderer& renderer, SaveOp* op, int newSaveCount) {
    int saveFlags = op->getFlags();
    DEFER_LOGD("%p adding saveOp %p, flags %x, new count %d", this, op, saveFlags, newSaveCount);

    if (recordingComplexClip() && (saveFlags & SkCanvas::kClip_SaveFlag)) {
        // store and replay the save operation, as it may be needed to correctly playback the clip
        DEFER_LOGD("    adding save barrier with new save count %d", newSaveCount);
        storeStateOpBarrier(renderer, op);
        mSaveStack.push(newSaveCount);
    }
}

/**
 * saveLayer() commands must be associated with a restoreToCount batch that will clean up and draw
 * the layer in the deferred list
 *
 * other save() commands which occur as children of a snapshot with complex clip will be deferred,
 * and must be restored
 *
 * Either will act as a barrier to draw operation reordering, as we want to play back layer
 * save/restore and complex canvas modifications (including save/restore) in order.
 */
void DeferredDisplayList::addRestoreToCount(OpenGLRenderer& renderer, StateOp* op,
        int newSaveCount) {
    DEFER_LOGD("%p addRestoreToCount %d", this, newSaveCount);

    if (recordingComplexClip() && newSaveCount <= mComplexClipStackStart) {
        mComplexClipStackStart = -1;
        resetBatchingState();
    }

    if (mSaveStack.isEmpty() || newSaveCount > mSaveStack.top()) {
        return;
    }

    while (!mSaveStack.isEmpty() && mSaveStack.top() >= newSaveCount) mSaveStack.pop();

    storeRestoreToCountBarrier(renderer, op, mSaveStack.size() + FLUSH_SAVE_STACK_DEPTH);
}

void DeferredDisplayList::addDrawOp(OpenGLRenderer& renderer, DrawOp* op) {
    /* 1: op calculates local bounds */
    DeferredDisplayState* const state = createState();
    if (op->getLocalBounds(state->mBounds)) {
        if (state->mBounds.isEmpty()) {
            // valid empty bounds, don't bother deferring
            tryRecycleState(state);
            return;
        }
    } else {
        state->mBounds.setEmpty();
    }

    /* 2: renderer calculates global bounds + stores state */
    if (renderer.storeDisplayState(*state, getDrawOpDeferFlags())) {
        tryRecycleState(state);
        return; // quick rejected
    }

    /* 3: ask op for defer info, given renderer state */
    DeferInfo deferInfo;
    op->onDefer(renderer, deferInfo, *state);

    // complex clip has a complex set of expectations on the renderer state - for now, avoid taking
    // the merge path in those cases
    deferInfo.mergeable &= !recordingComplexClip();
    deferInfo.opaqueOverBounds &= !recordingComplexClip() && mSaveStack.isEmpty();

    if (CC_LIKELY(mAvoidOverdraw) && mBatches.size() &&
            state->mClipSideFlags != kClipSide_ConservativeFull &&
            deferInfo.opaqueOverBounds && state->mBounds.contains(mBounds)) {
        // avoid overdraw by resetting drawing state + discarding drawing ops
        discardDrawingBatches(mBatches.size() - 1);
        resetBatchingState();
    }

    if (CC_UNLIKELY(renderer.getCaches().drawReorderDisabled)) {
        // TODO: elegant way to reuse batches?
        DrawBatch* b = new DrawBatch(deferInfo);
        b->add(op, state, deferInfo.opaqueOverBounds);
        mBatches.add(b);
        return;
    }

    // find the latest batch of the new op's type, and try to merge the new op into it
    DrawBatch* targetBatch = nullptr;

    // insertion point of a new batch, will hopefully be immediately after similar batch
    // (eventually, should be similar shader)
    int insertBatchIndex = mBatches.size();
    if (!mBatches.isEmpty()) {
        if (state->mBounds.isEmpty()) {
            // don't know the bounds for op, so add to last batch and start from scratch on next op
            DrawBatch* b = new DrawBatch(deferInfo);
            b->add(op, state, deferInfo.opaqueOverBounds);
            mBatches.add(b);
            resetBatchingState();
#if DEBUG_DEFER
            DEFER_LOGD("Warning: Encountered op with empty bounds, resetting batches");
            op->output(2);
#endif
            return;
        }

        if (deferInfo.mergeable) {
            // Try to merge with any existing batch with same mergeId.
            if (mMergingBatches[deferInfo.batchId].get(deferInfo.mergeId, targetBatch)) {
                if (!((MergingDrawBatch*) targetBatch)->canMergeWith(op, state)) {
                    targetBatch = nullptr;
                }
            }
        } else {
            // join with similar, non-merging batch
            targetBatch = (DrawBatch*)mBatchLookup[deferInfo.batchId];
        }

        if (targetBatch || deferInfo.mergeable) {
            // iterate back toward target to see if anything drawn since should overlap the new op
Chris Craik527a3aa2013-03-04 10:19:31 -0800547 // if no target, merging ops still interate to find similar batch to insert after
            for (int i = mBatches.size() - 1; i >= mEarliestBatchIndex; i--) {
                DrawBatch* overBatch = (DrawBatch*)mBatches[i];

                if (overBatch == targetBatch) break;

                // TODO: also consider shader shared between batch types
                if (deferInfo.batchId == overBatch->getBatchId()) {
                    insertBatchIndex = i + 1;
                    if (!targetBatch) break; // found insert position, quit
                }

                if (overBatch->intersects(state->mBounds)) {
                    // NOTE: it may be possible to optimize for special cases where two operations
                    // of the same batch/paint could swap order, such as with a non-mergeable
                    // (clipped) and a mergeable text operation
                    targetBatch = nullptr;
#if DEBUG_DEFER
                    DEFER_LOGD("op couldn't join batch %p, was intersected by batch %d",
                            targetBatch, i);
                    op->output(2);
#endif
                    break;
                }
            }
        }
    }

    if (!targetBatch) {
        if (deferInfo.mergeable) {
            targetBatch = new MergingDrawBatch(deferInfo,
                    renderer.getViewportWidth(), renderer.getViewportHeight());
            mMergingBatches[deferInfo.batchId].put(deferInfo.mergeId, targetBatch);
        } else {
            targetBatch = new DrawBatch(deferInfo);
            mBatchLookup[deferInfo.batchId] = targetBatch;
        }

        DEFER_LOGD("creating %singBatch %p, bid %x, at %d",
                deferInfo.mergeable ? "Merg" : "Draw",
                targetBatch, deferInfo.batchId, insertBatchIndex);
        mBatches.insertAt(targetBatch, insertBatchIndex);
    }

    targetBatch->add(op, state, deferInfo.opaqueOverBounds);
}
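
/*
 * Illustrative example of the reordering above (hypothetical op sequence, not code from this
 * file): if ops arrive in the order [bitmap A, text B, bitmap C] and C's bounds don't intersect
 * B's, C is appended to A's batch (or merged with it, if the ops share a merge id and pass
 * canMergeWith()), so at flush time both bitmaps are issued back-to-back before the text. If C
 * did intersect B, the backward scan clears targetBatch and C starts a new batch after B instead.
 */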

void DeferredDisplayList::storeStateOpBarrier(OpenGLRenderer& renderer, StateOp* op) {
    DEFER_LOGD("%p adding state op barrier at pos %d", this, mBatches.size());

    DeferredDisplayState* state = createState();
    renderer.storeDisplayState(*state, getStateOpDeferFlags());
    mBatches.add(new StateOpBatch(op, state));
    resetBatchingState();
}

void DeferredDisplayList::storeRestoreToCountBarrier(OpenGLRenderer& renderer, StateOp* op,
        int newSaveCount) {
    DEFER_LOGD("%p adding restore to count %d barrier, pos %d",
            this, newSaveCount, mBatches.size());

    // store displayState for the restore operation, as it may be associated with a saveLayer that
    // doesn't have kClip_SaveFlag set
    DeferredDisplayState* state = createState();
    renderer.storeDisplayState(*state, getStateOpDeferFlags());
    mBatches.add(new RestoreToCountBatch(op, state, newSaveCount));
    resetBatchingState();
}

/////////////////////////////////////////////////////////////////////////////////
// Replay / flush
/////////////////////////////////////////////////////////////////////////////////

static void replayBatchList(const Vector<Batch*>& batchList,
        OpenGLRenderer& renderer, Rect& dirty) {

    for (unsigned int i = 0; i < batchList.size(); i++) {
        if (batchList[i]) {
            batchList[i]->replay(renderer, dirty, i);
        }
    }
    DEFER_LOGD("--flushed, drew %d batches", batchList.size());
}

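/**
 * Replays all deferred batches into the renderer, then clears the list.
 *
 * Rough usage sketch (illustrative only - construction and the deferral pass are driven from
 * outside this file, and the exact constructor signature lives in DeferredDisplayList.h):
 *
 *     DeferredDisplayList deferredList(viewportBounds);             // hypothetical setup
 *     deferredList.addSaveLayer(renderer, saveLayerOp, saveCount);
 *     deferredList.addDrawOp(renderer, drawOp);                     // repeated per recorded draw
 *     deferredList.addRestoreToCount(renderer, restoreOp, saveCount);
 *     deferredList.flush(renderer, dirty);                          // replays batches, then clear()
 */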
void DeferredDisplayList::flush(OpenGLRenderer& renderer, Rect& dirty) {
    ATRACE_NAME("flush drawing commands");
    Caches::getInstance().fontRenderer->endPrecaching();

    if (isEmpty()) return; // nothing to flush
    renderer.restoreToCount(1);

    DEFER_LOGD("--flushing");
    renderer.eventMark("Flush");

    // save and restore (with draw modifiers) so that reordering doesn't affect final state
    DrawModifiers restoreDrawModifiers = renderer.getDrawModifiers();
    renderer.save(SkCanvas::kMatrix_SaveFlag | SkCanvas::kClip_SaveFlag);

    if (CC_LIKELY(mAvoidOverdraw)) {
        for (unsigned int i = 1; i < mBatches.size(); i++) {
            if (mBatches[i] && mBatches[i]->coversBounds(mBounds)) {
                discardDrawingBatches(i - 1);
            }
        }
    }
    // NOTE: depth of the save stack at this point, before playback, should be reflected in
    // FLUSH_SAVE_STACK_DEPTH, so that save/restores match up correctly
    replayBatchList(mBatches, renderer, dirty);

    renderer.restoreToCount(1);
    renderer.setDrawModifiers(restoreDrawModifiers);

    DEFER_LOGD("--flush complete");
    clear();
}

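// Deletes purely-drawing batches in [mEarliestUnclearedIndex, maxIndex], replacing them with null
// placeholders so that batch indices remain stable. Used for overdraw avoidance: callers invoke
// this when a later batch or incoming op opaquely covers the layer bounds, making everything it
// hides safe to drop.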
void DeferredDisplayList::discardDrawingBatches(const unsigned int maxIndex) {
    for (unsigned int i = mEarliestUnclearedIndex; i <= maxIndex; i++) {
        // leave deferred state ops alone for simplicity (empty save restore pairs may now exist)
        if (mBatches[i] && mBatches[i]->purelyDrawBatch()) {
            delete mBatches[i];
            mBatches.replaceAt(nullptr, i);
        }
    }
    mEarliestUnclearedIndex = maxIndex + 1;
}

}; // namespace uirenderer
}; // namespace android