blob: 469ffbab682fd5a7eb3059ec4e1627ced651163b [file] [log] [blame]
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001/*
Arun Kumar K.R2aa44c62014-01-21 23:08:28 -08002 * Copyright (C) 2012-2014, The Linux Foundation. All rights reserved.
Naseer Ahmed7c958d42012-07-31 18:57:03 -07003 * Not a Contribution, Apache license notifications and license are retained
4 * for attribution purposes only.
5 *
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 */
18
Saurabh Shah4fdde762013-04-30 18:47:33 -070019#include <math.h>
Naseer Ahmed7c958d42012-07-31 18:57:03 -070020#include "hwc_mdpcomp.h"
Naseer Ahmed54821fe2012-11-28 18:44:38 -050021#include <sys/ioctl.h>
Saurabh Shah56f610d2012-08-07 15:27:06 -070022#include "external.h"
Jeykumar Sankaran27dee262013-08-01 17:09:54 -070023#include "virtual.h"
Ramkumar Radhakrishnan47573e22012-11-07 11:36:41 -080024#include "qdMetaData.h"
Ramkumar Radhakrishnan288f8c72013-01-15 11:37:54 -080025#include "mdp_version.h"
Saurabh Shah2a4eb1b2013-07-22 16:33:23 -070026#include "hwc_fbupdate.h"
Saurabh Shaha9da08f2013-07-03 13:27:53 -070027#include "hwc_ad.h"
Saurabh Shahacf10202013-02-26 10:15:15 -080028#include <overlayRotator.h>
29
Saurabh Shah85234ec2013-04-12 17:09:00 -070030using namespace overlay;
Saurabh Shahbd2d0832013-04-04 14:33:08 -070031using namespace qdutils;
Saurabh Shahacf10202013-02-26 10:15:15 -080032using namespace overlay::utils;
33namespace ovutils = overlay::utils;
Naseer Ahmed7c958d42012-07-31 18:57:03 -070034
Naseer Ahmed7c958d42012-07-31 18:57:03 -070035namespace qhwc {
36
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -080037//==============MDPComp========================================================
38
Naseer Ahmed7c958d42012-07-31 18:57:03 -070039IdleInvalidator *MDPComp::idleInvalidator = NULL;
40bool MDPComp::sIdleFallBack = false;
Ramkumar Radhakrishnan92abb4f2014-02-06 21:31:29 -080041bool MDPComp::sHandleTimeout = false;
Naseer Ahmed7c958d42012-07-31 18:57:03 -070042bool MDPComp::sDebugLogs = false;
Naseer Ahmed54821fe2012-11-28 18:44:38 -050043bool MDPComp::sEnabled = false;
Jeykumar Sankaran24c199d2013-05-24 09:40:36 -070044bool MDPComp::sEnableMixedMode = true;
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -070045bool MDPComp::sEnablePartialFrameUpdate = false;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -080046int MDPComp::sMaxPipesPerMixer = MAX_PIPES_PER_MIXER;
Saurabh Shahf5f2b132013-11-25 12:08:35 -080047double MDPComp::sMaxBw = 0.0;
Saurabh Shah3c1a6b02013-11-22 11:10:20 -080048double MDPComp::sBwClaimed = 0.0;
radhakrishnac9a67412013-09-25 17:40:42 +053049bool MDPComp::sEnable4k2kYUVSplit = false;
Naseer Ahmed7c958d42012-07-31 18:57:03 -070050
Saurabh Shah88e4d272013-09-03 13:31:29 -070051MDPComp* MDPComp::getObject(hwc_context_t *ctx, const int& dpy) {
52 if(isDisplaySplit(ctx, dpy)) {
53 return new MDPCompSplit(dpy);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -080054 }
Saurabh Shah88e4d272013-09-03 13:31:29 -070055 return new MDPCompNonSplit(dpy);
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -080056}
57
Jeykumar Sankaran85977e32013-02-25 17:06:08 -080058MDPComp::MDPComp(int dpy):mDpy(dpy){};
59
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -080060void MDPComp::dump(android::String8& buf)
61{
Jeykumar Sankaran3c6bb042013-08-15 14:01:04 -070062 if(mCurrentFrame.layerCount > MAX_NUM_APP_LAYERS)
63 return;
64
Jeykumar Sankaran85977e32013-02-25 17:06:08 -080065 dumpsys_log(buf,"HWC Map for Dpy: %s \n",
Jeykumar Sankaran27dee262013-08-01 17:09:54 -070066 (mDpy == 0) ? "\"PRIMARY\"" :
67 (mDpy == 1) ? "\"EXTERNAL\"" : "\"VIRTUAL\"");
Saurabh Shahe9bc60f2013-08-29 12:58:06 -070068 dumpsys_log(buf,"CURR_FRAME: layerCount:%2d mdpCount:%2d "
69 "fbCount:%2d \n", mCurrentFrame.layerCount,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -080070 mCurrentFrame.mdpCount, mCurrentFrame.fbCount);
71 dumpsys_log(buf,"needsFBRedraw:%3s pipesUsed:%2d MaxPipesPerMixer: %d \n",
72 (mCurrentFrame.needsRedraw? "YES" : "NO"),
73 mCurrentFrame.mdpCount, sMaxPipesPerMixer);
74 dumpsys_log(buf," --------------------------------------------- \n");
75 dumpsys_log(buf," listIdx | cached? | mdpIndex | comptype | Z \n");
76 dumpsys_log(buf," --------------------------------------------- \n");
77 for(int index = 0; index < mCurrentFrame.layerCount; index++ )
78 dumpsys_log(buf," %7d | %7s | %8d | %9s | %2d \n",
79 index,
80 (mCurrentFrame.isFBComposed[index] ? "YES" : "NO"),
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -070081 mCurrentFrame.layerToMDP[index],
Jeykumar Sankaran85977e32013-02-25 17:06:08 -080082 (mCurrentFrame.isFBComposed[index] ?
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -070083 (mCurrentFrame.drop[index] ? "DROP" :
84 (mCurrentFrame.needsRedraw ? "GLES" : "CACHE")) : "MDP"),
Jeykumar Sankaran85977e32013-02-25 17:06:08 -080085 (mCurrentFrame.isFBComposed[index] ? mCurrentFrame.fbZ :
86 mCurrentFrame.mdpToLayer[mCurrentFrame.layerToMDP[index]].pipeInfo->zOrder));
87 dumpsys_log(buf,"\n");
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -080088}
89
90bool MDPComp::init(hwc_context_t *ctx) {
91
92 if(!ctx) {
93 ALOGE("%s: Invalid hwc context!!",__FUNCTION__);
94 return false;
95 }
96
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -080097 char property[PROPERTY_VALUE_MAX];
98
99 sEnabled = false;
100 if((property_get("persist.hwc.mdpcomp.enable", property, NULL) > 0) &&
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800101 (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
102 (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800103 sEnabled = true;
104 }
105
Jeykumar Sankaran24c199d2013-05-24 09:40:36 -0700106 sEnableMixedMode = true;
107 if((property_get("debug.mdpcomp.mixedmode.disable", property, NULL) > 0) &&
108 (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
109 (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
110 sEnableMixedMode = false;
111 }
112
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800113 if(property_get("debug.mdpcomp.logs", property, NULL) > 0) {
114 if(atoi(property) != 0)
115 sDebugLogs = true;
116 }
117
Jeykumar Sankaran7d6c4c02014-02-19 12:40:57 -0800118 if(property_get("persist.hwc.partialupdate", property, NULL) > 0) {
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700119 if((atoi(property) != 0) && ctx->mMDP.panel == MIPI_CMD_PANEL &&
120 qdutils::MDPVersion::getInstance().is8x74v2())
121 sEnablePartialFrameUpdate = true;
122 }
123 ALOGE_IF(isDebug(), "%s: Partial Update applicable?: %d",__FUNCTION__,
124 sEnablePartialFrameUpdate);
125
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800126 sMaxPipesPerMixer = MAX_PIPES_PER_MIXER;
Saurabh Shah85234ec2013-04-12 17:09:00 -0700127 if(property_get("debug.mdpcomp.maxpermixer", property, "-1") > 0) {
128 int val = atoi(property);
129 if(val >= 0)
130 sMaxPipesPerMixer = min(val, MAX_PIPES_PER_MIXER);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800131 }
132
Naseer Ahmedf40f2c82013-08-14 16:42:40 -0400133 if(ctx->mMDP.panel != MIPI_CMD_PANEL) {
134 // Idle invalidation is not necessary on command mode panels
135 long idle_timeout = DEFAULT_IDLE_TIME;
136 if(property_get("debug.mdpcomp.idletime", property, NULL) > 0) {
137 if(atoi(property) != 0)
138 idle_timeout = atoi(property);
139 }
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800140
Naseer Ahmedf40f2c82013-08-14 16:42:40 -0400141 //create Idle Invalidator only when not disabled through property
142 if(idle_timeout != -1)
143 idleInvalidator = IdleInvalidator::getInstance();
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800144
Naseer Ahmedf40f2c82013-08-14 16:42:40 -0400145 if(idleInvalidator == NULL) {
146 ALOGE("%s: failed to instantiate idleInvalidator object",
147 __FUNCTION__);
148 } else {
149 idleInvalidator->init(timeout_handler, ctx, idle_timeout);
150 }
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800151 }
radhakrishnac9a67412013-09-25 17:40:42 +0530152
153 if((property_get("debug.mdpcomp.4k2kSplit", property, "0") > 0) &&
154 (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
155 (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
156 sEnable4k2kYUVSplit = true;
157 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -0700158 return true;
159}
160
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800161void MDPComp::reset(hwc_context_t *ctx) {
162 const int numLayers = ctx->listStats[mDpy].numAppLayers;
Saurabh Shah2a4eb1b2013-07-22 16:33:23 -0700163 mCurrentFrame.reset(numLayers);
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800164 ctx->mOverlay->clear(mDpy);
165 ctx->mLayerRotMap[mDpy]->clear();
Saurabh Shah2a4eb1b2013-07-22 16:33:23 -0700166}
167
Naseer Ahmed7c958d42012-07-31 18:57:03 -0700168void MDPComp::timeout_handler(void *udata) {
169 struct hwc_context_t* ctx = (struct hwc_context_t*)(udata);
170
171 if(!ctx) {
172 ALOGE("%s: received empty data in timer callback", __FUNCTION__);
173 return;
174 }
Ramkumar Radhakrishnan92abb4f2014-02-06 21:31:29 -0800175 Locker::Autolock _l(ctx->mDrawLock);
176 // Handle timeout event only if the previous composition is MDP or MIXED.
177 if(!sHandleTimeout) {
178 ALOGD_IF(isDebug(), "%s:Do not handle this timeout", __FUNCTION__);
179 return;
180 }
Jesse Hall3be78d92012-08-21 15:12:23 -0700181 if(!ctx->proc) {
Naseer Ahmed7c958d42012-07-31 18:57:03 -0700182 ALOGE("%s: HWC proc not registered", __FUNCTION__);
183 return;
184 }
185 sIdleFallBack = true;
186 /* Trigger SF to redraw the current frame */
Jesse Hall3be78d92012-08-21 15:12:23 -0700187 ctx->proc->invalidate(ctx->proc);
Naseer Ahmed7c958d42012-07-31 18:57:03 -0700188}
189
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800190void MDPComp::setMDPCompLayerFlags(hwc_context_t *ctx,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800191 hwc_display_contents_1_t* list) {
192 LayerProp *layerProp = ctx->layerProp[mDpy];
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800193
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800194 for(int index = 0; index < ctx->listStats[mDpy].numAppLayers; index++) {
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800195 hwc_layer_1_t* layer = &(list->hwLayers[index]);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800196 if(!mCurrentFrame.isFBComposed[index]) {
197 layerProp[index].mFlags |= HWC_MDPCOMP;
198 layer->compositionType = HWC_OVERLAY;
199 layer->hints |= HWC_HINT_CLEAR_FB;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800200 } else {
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700201 /* Drop the layer when its already present in FB OR when it lies
202 * outside frame's ROI */
203 if(!mCurrentFrame.needsRedraw || mCurrentFrame.drop[index]) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800204 layer->compositionType = HWC_OVERLAY;
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700205 }
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800206 }
207 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -0700208}
Naseer Ahmed54821fe2012-11-28 18:44:38 -0500209
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800210void MDPComp::setRedraw(hwc_context_t *ctx,
211 hwc_display_contents_1_t* list) {
212 mCurrentFrame.needsRedraw = false;
213 if(!mCachedFrame.isSameFrame(mCurrentFrame, list) ||
214 (list->flags & HWC_GEOMETRY_CHANGED) ||
215 isSkipPresent(ctx, mDpy)) {
216 mCurrentFrame.needsRedraw = true;
217 }
218}
219
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800220MDPComp::FrameInfo::FrameInfo() {
Saurabh Shahaa236822013-04-24 18:07:26 -0700221 reset(0);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800222}
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800223
Saurabh Shahaa236822013-04-24 18:07:26 -0700224void MDPComp::FrameInfo::reset(const int& numLayers) {
225 for(int i = 0 ; i < MAX_PIPES_PER_MIXER && numLayers; i++ ) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800226 if(mdpToLayer[i].pipeInfo) {
227 delete mdpToLayer[i].pipeInfo;
228 mdpToLayer[i].pipeInfo = NULL;
229 //We dont own the rotator
230 mdpToLayer[i].rot = NULL;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800231 }
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800232 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800233
234 memset(&mdpToLayer, 0, sizeof(mdpToLayer));
235 memset(&layerToMDP, -1, sizeof(layerToMDP));
Saurabh Shahaa236822013-04-24 18:07:26 -0700236 memset(&isFBComposed, 1, sizeof(isFBComposed));
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800237
Saurabh Shahaa236822013-04-24 18:07:26 -0700238 layerCount = numLayers;
239 fbCount = numLayers;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800240 mdpCount = 0;
Saurabh Shah2f3895f2013-05-02 10:13:31 -0700241 needsRedraw = true;
Saurabh Shahd53bc5f2014-02-05 10:17:43 -0800242 fbZ = -1;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800243}
244
Saurabh Shahaa236822013-04-24 18:07:26 -0700245void MDPComp::FrameInfo::map() {
246 // populate layer and MDP maps
247 int mdpIdx = 0;
248 for(int idx = 0; idx < layerCount; idx++) {
249 if(!isFBComposed[idx]) {
250 mdpToLayer[mdpIdx].listIndex = idx;
251 layerToMDP[idx] = mdpIdx++;
252 }
253 }
254}
255
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800256MDPComp::LayerCache::LayerCache() {
257 reset();
258}
259
260void MDPComp::LayerCache::reset() {
Saurabh Shahaa236822013-04-24 18:07:26 -0700261 memset(&hnd, 0, sizeof(hnd));
Prabhanjan Kandula2243aa62013-10-24 12:58:55 +0530262 memset(&isFBComposed, true, sizeof(isFBComposed));
263 memset(&drop, false, sizeof(drop));
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800264 layerCount = 0;
Saurabh Shahaa236822013-04-24 18:07:26 -0700265}
266
267void MDPComp::LayerCache::cacheAll(hwc_display_contents_1_t* list) {
268 const int numAppLayers = list->numHwLayers - 1;
269 for(int i = 0; i < numAppLayers; i++) {
270 hnd[i] = list->hwLayers[i].handle;
271 }
272}
273
274void MDPComp::LayerCache::updateCounts(const FrameInfo& curFrame) {
Saurabh Shahaa236822013-04-24 18:07:26 -0700275 layerCount = curFrame.layerCount;
Prabhanjan Kandula2243aa62013-10-24 12:58:55 +0530276 memcpy(&isFBComposed, &curFrame.isFBComposed, sizeof(isFBComposed));
277 memcpy(&drop, &curFrame.drop, sizeof(drop));
278}
279
Jeykumar Sankaran988d3682013-11-15 11:57:16 -0800280bool MDPComp::LayerCache::isSameFrame(const FrameInfo& curFrame,
281 hwc_display_contents_1_t* list) {
Prabhanjan Kandula2243aa62013-10-24 12:58:55 +0530282 if(layerCount != curFrame.layerCount)
283 return false;
284 for(int i = 0; i < curFrame.layerCount; i++) {
285 if((curFrame.isFBComposed[i] != isFBComposed[i]) ||
286 (curFrame.drop[i] != drop[i])) {
287 return false;
288 }
Jeykumar Sankaran988d3682013-11-15 11:57:16 -0800289 if(curFrame.isFBComposed[i] &&
290 (hnd[i] != list->hwLayers[i].handle)){
291 return false;
292 }
Prabhanjan Kandula2243aa62013-10-24 12:58:55 +0530293 }
294 return true;
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800295}
296
Saurabh Shahe9bc60f2013-08-29 12:58:06 -0700297bool MDPComp::isSupportedForMDPComp(hwc_context_t *ctx, hwc_layer_1_t* layer) {
298 private_handle_t *hnd = (private_handle_t *)layer->handle;
299 if((not isYuvBuffer(hnd) and has90Transform(layer)) or
300 (not isValidDimension(ctx,layer))
301 //More conditions here, SKIP, sRGB+Blend etc
302 ) {
303 return false;
304 }
305 return true;
306}
307
Sravan Kumar D.V.Nad5d9292013-04-24 14:23:04 +0530308bool MDPComp::isValidDimension(hwc_context_t *ctx, hwc_layer_1_t *layer) {
Jeykumar Sankaranc18dbc22013-02-08 14:29:44 -0800309 private_handle_t *hnd = (private_handle_t *)layer->handle;
310
311 if(!hnd) {
Sushil Chauhan897a9c32013-07-18 11:09:55 -0700312 if (layer->flags & HWC_COLOR_FILL) {
313 // Color layer
314 return true;
315 }
Jeykumar Sankaranc18dbc22013-02-08 14:29:44 -0800316 ALOGE("%s: layer handle is NULL", __FUNCTION__);
317 return false;
318 }
319
Naseer Ahmede850a802013-09-06 13:12:52 -0400320 //XXX: Investigate doing this with pixel phase on MDSS
Naseer Ahmede77f8082013-10-10 13:42:48 -0400321 if(!isSecureBuffer(hnd) && isNonIntegralSourceCrop(layer->sourceCropf))
Naseer Ahmede850a802013-09-06 13:12:52 -0400322 return false;
323
Saurabh Shah62e1d732013-09-17 10:44:05 -0700324 hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
Saurabh Shah4fdde762013-04-30 18:47:33 -0700325 hwc_rect_t dst = layer->displayFrame;
Saurabh Shah4fdde762013-04-30 18:47:33 -0700326 int crop_w = crop.right - crop.left;
327 int crop_h = crop.bottom - crop.top;
328 int dst_w = dst.right - dst.left;
329 int dst_h = dst.bottom - dst.top;
Jeykumar Sankaran6cd8e7e2014-01-13 16:01:05 -0800330 float w_scale = ((float)crop_w / (float)dst_w);
331 float h_scale = ((float)crop_h / (float)dst_h);
Saurabh Shah4fdde762013-04-30 18:47:33 -0700332
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800333 /* Workaround for MDP HW limitation in DSI command mode panels where
334 * FPS will not go beyond 30 if buffers on RGB pipes are of width or height
335 * less than 5 pixels
Sravan Kumar D.V.Nad5d9292013-04-24 14:23:04 +0530336 * There also is a HW limilation in MDP, minimum block size is 2x2
337 * Fallback to GPU if height is less than 2.
338 */
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800339 if((crop_w < 5)||(crop_h < 5))
Jeykumar Sankaranc18dbc22013-02-08 14:29:44 -0800340 return false;
341
Jeykumar Sankaran6cd8e7e2014-01-13 16:01:05 -0800342 if((w_scale > 1.0f) || (h_scale > 1.0f)) {
Jeykumar Sankaran1706a772013-11-27 12:55:19 -0800343 const uint32_t downscale =
Saurabh Shah4fdde762013-04-30 18:47:33 -0700344 qdutils::MDPVersion::getInstance().getMaxMDPDownscale();
Jeykumar Sankaran6cd8e7e2014-01-13 16:01:05 -0800345 const float w_dscale = w_scale;
346 const float h_dscale = h_scale;
347
Jeykumar Sankaran1706a772013-11-27 12:55:19 -0800348 if(ctx->mMDP.version >= qdutils::MDSS_V5) {
349 /* Workaround for downscales larger than 4x.
350 * Will be removed once decimator block is enabled for MDSS
351 */
352 if(!qdutils::MDPVersion::getInstance().supportsDecimation()) {
353 if(crop_w > MAX_DISPLAY_DIM || w_dscale > downscale ||
354 h_dscale > downscale)
355 return false;
356 } else {
357 if(w_dscale > 64 || h_dscale > 64)
358 return false;
359 }
360 } else { //A-family
361 if(w_dscale > downscale || h_dscale > downscale)
Saurabh Shah4fdde762013-04-30 18:47:33 -0700362 return false;
363 }
Saurabh Shah4fdde762013-04-30 18:47:33 -0700364 }
365
Jeykumar Sankaran6cd8e7e2014-01-13 16:01:05 -0800366 if((w_scale < 1.0f) || (h_scale < 1.0f)) {
367 const uint32_t upscale =
368 qdutils::MDPVersion::getInstance().getMaxMDPUpscale();
369 const float w_uscale = 1.0f / w_scale;
370 const float h_uscale = 1.0f / h_scale;
371
372 if(w_uscale > upscale || h_uscale > upscale)
373 return false;
374 }
375
Jeykumar Sankaranc18dbc22013-02-08 14:29:44 -0800376 return true;
377}
378
Saurabh Shahaf5f5972013-07-30 13:56:35 -0700379ovutils::eDest MDPComp::getMdpPipe(hwc_context_t *ctx, ePipeType type,
380 int mixer) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800381 overlay::Overlay& ov = *ctx->mOverlay;
382 ovutils::eDest mdp_pipe = ovutils::OV_INVALID;
383
384 switch(type) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800385 case MDPCOMP_OV_DMA:
Saurabh Shahaf5f5972013-07-30 13:56:35 -0700386 mdp_pipe = ov.nextPipe(ovutils::OV_MDP_PIPE_DMA, mDpy, mixer);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800387 if(mdp_pipe != ovutils::OV_INVALID) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800388 return mdp_pipe;
389 }
390 case MDPCOMP_OV_ANY:
391 case MDPCOMP_OV_RGB:
Saurabh Shahaf5f5972013-07-30 13:56:35 -0700392 mdp_pipe = ov.nextPipe(ovutils::OV_MDP_PIPE_RGB, mDpy, mixer);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800393 if(mdp_pipe != ovutils::OV_INVALID) {
394 return mdp_pipe;
395 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800396
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800397 if(type == MDPCOMP_OV_RGB) {
398 //Requested only for RGB pipe
399 break;
400 }
401 case MDPCOMP_OV_VG:
Saurabh Shahaf5f5972013-07-30 13:56:35 -0700402 return ov.nextPipe(ovutils::OV_MDP_PIPE_VG, mDpy, mixer);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800403 default:
404 ALOGE("%s: Invalid pipe type",__FUNCTION__);
405 return ovutils::OV_INVALID;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800406 };
407 return ovutils::OV_INVALID;
408}
409
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800410bool MDPComp::isFrameDoable(hwc_context_t *ctx) {
Saurabh Shahaa236822013-04-24 18:07:26 -0700411 bool ret = true;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800412
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800413 if(!isEnabled()) {
414 ALOGD_IF(isDebug(),"%s: MDP Comp. not enabled.", __FUNCTION__);
Saurabh Shahaa236822013-04-24 18:07:26 -0700415 ret = false;
Saurabh Shahd4e65852013-06-17 11:33:53 -0700416 } else if(qdutils::MDPVersion::getInstance().is8x26() &&
Ramkumar Radhakrishnan8bb48d32013-12-30 23:11:27 -0800417 ctx->mVideoTransFlag &&
418 isSecondaryConnected(ctx)) {
Saurabh Shahd4e65852013-06-17 11:33:53 -0700419 //1 Padding round to shift pipes across mixers
420 ALOGD_IF(isDebug(),"%s: MDP Comp. video transition padding round",
421 __FUNCTION__);
422 ret = false;
Ramkumar Radhakrishnan8bb48d32013-12-30 23:11:27 -0800423 } else if(isSecondaryConfiguring(ctx)) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800424 ALOGD_IF( isDebug(),"%s: External Display connection is pending",
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800425 __FUNCTION__);
Saurabh Shahaa236822013-04-24 18:07:26 -0700426 ret = false;
Saurabh Shahaa236822013-04-24 18:07:26 -0700427 } else if(ctx->isPaddingRound) {
Raj Kamal9ed3d6b2014-02-07 16:15:17 +0530428 ALOGD_IF(isDebug(), "%s: padding round invoked for dpy %d",
429 __FUNCTION__,mDpy);
Saurabh Shahaa236822013-04-24 18:07:26 -0700430 ret = false;
Saurabh Shah0ceeb6a2013-04-23 10:46:07 -0700431 }
Saurabh Shahaa236822013-04-24 18:07:26 -0700432 return ret;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800433}
434
Jeykumar Sankaran862d87c2013-11-08 16:47:26 -0800435/*
436 * 1) Identify layers that are not visible in the updating ROI and drop them
437 * from composition.
438 * 2) If we have a scaling layers which needs cropping against generated ROI.
439 * Reset ROI to full resolution.
440 */
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700441bool MDPComp::validateAndApplyROI(hwc_context_t *ctx,
442 hwc_display_contents_1_t* list, hwc_rect_t roi) {
443 int numAppLayers = ctx->listStats[mDpy].numAppLayers;
444
445 if(!isValidRect(roi))
446 return false;
447
Jeykumar Sankaran862d87c2013-11-08 16:47:26 -0800448 hwc_rect_t visibleRect = roi;
449
450 for(int i = numAppLayers - 1; i >= 0; i--){
451
452 if(!isValidRect(visibleRect)) {
453 mCurrentFrame.drop[i] = true;
454 mCurrentFrame.dropCount++;
Jeykumar Sankaran0b961452014-01-21 17:26:12 -0800455 continue;
Jeykumar Sankaran862d87c2013-11-08 16:47:26 -0800456 }
457
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700458 const hwc_layer_1_t* layer = &list->hwLayers[i];
459
460 hwc_rect_t dstRect = layer->displayFrame;
Arun Kumar K.R91090c72013-10-28 19:40:18 -0700461 hwc_rect_t srcRect = integerizeSourceCrop(layer->sourceCropf);
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700462
Jeykumar Sankaran862d87c2013-11-08 16:47:26 -0800463 hwc_rect_t res = getIntersection(visibleRect, dstRect);
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700464
465 int res_w = res.right - res.left;
466 int res_h = res.bottom - res.top;
467 int dst_w = dstRect.right - dstRect.left;
468 int dst_h = dstRect.bottom - dstRect.top;
469
470 if(!isValidRect(res)) {
471 mCurrentFrame.drop[i] = true;
472 mCurrentFrame.dropCount++;
473 }else {
474 /* Reset frame ROI when any layer which needs scaling also needs ROI
475 * cropping */
Jeykumar Sankaran0b961452014-01-21 17:26:12 -0800476 if((res_w != dst_w || res_h != dst_h) && needsScaling (layer)) {
Arpita Banerjeed8965982013-11-08 17:27:33 -0800477 ALOGI("%s: Resetting ROI due to scaling", __FUNCTION__);
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700478 memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
479 mCurrentFrame.dropCount = 0;
480 return false;
481 }
Jeykumar Sankaran862d87c2013-11-08 16:47:26 -0800482
Jeykumar Sankaran0b961452014-01-21 17:26:12 -0800483 /* deduct any opaque region from visibleRect */
484 if (layer->blending == HWC_BLENDING_NONE)
485 visibleRect = deductRect(visibleRect, res);
486 }
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700487 }
488 return true;
489}
490
491void MDPComp::generateROI(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
492 int numAppLayers = ctx->listStats[mDpy].numAppLayers;
493
494 if(!sEnablePartialFrameUpdate) {
495 return;
496 }
497
498 if(mDpy || isDisplaySplit(ctx, mDpy)){
499 ALOGE_IF(isDebug(), "%s: ROI not supported for"
500 "the (1) external / virtual display's (2) dual DSI displays",
501 __FUNCTION__);
502 return;
503 }
504
Jeykumar Sankaran862d87c2013-11-08 16:47:26 -0800505 if(isSkipPresent(ctx, mDpy))
506 return;
507
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700508 if(list->flags & HWC_GEOMETRY_CHANGED)
509 return;
510
511 struct hwc_rect roi = (struct hwc_rect){0, 0, 0, 0};
512 for(int index = 0; index < numAppLayers; index++ ) {
513 if ((mCachedFrame.hnd[index] != list->hwLayers[index].handle) ||
514 isYuvBuffer((private_handle_t *)list->hwLayers[index].handle)) {
515 hwc_rect_t dstRect = list->hwLayers[index].displayFrame;
Arun Kumar K.R91090c72013-10-28 19:40:18 -0700516 hwc_rect_t srcRect = integerizeSourceCrop(
517 list->hwLayers[index].sourceCropf);
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700518
519 /* Intersect against display boundaries */
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700520 roi = getUnion(roi, dstRect);
521 }
522 }
523
524 if(!validateAndApplyROI(ctx, list, roi)){
525 roi = (struct hwc_rect) {0, 0,
526 (int)ctx->dpyAttr[mDpy].xres, (int)ctx->dpyAttr[mDpy].yres};
527 }
528
529 ctx->listStats[mDpy].roi.x = roi.left;
530 ctx->listStats[mDpy].roi.y = roi.top;
531 ctx->listStats[mDpy].roi.w = roi.right - roi.left;
532 ctx->listStats[mDpy].roi.h = roi.bottom - roi.top;
533
534 ALOGD_IF(isDebug(),"%s: generated ROI: [%d, %d, %d, %d]", __FUNCTION__,
535 roi.left, roi.top, roi.right, roi.bottom);
536}
537
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800538/* Checks for conditions where all the layers marked for MDP comp cannot be
539 * bypassed. On such conditions we try to bypass atleast YUV layers */
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800540bool MDPComp::tryFullFrame(hwc_context_t *ctx,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800541 hwc_display_contents_1_t* list){
542
Saurabh Shahaa236822013-04-24 18:07:26 -0700543 const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
Arun Kumar K.R2e2871c2014-01-10 12:47:06 -0800544 int priDispW = ctx->dpyAttr[HWC_DISPLAY_PRIMARY].xres;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800545
Ramkumar Radhakrishnanba713382013-08-30 18:41:07 -0700546 if(sIdleFallBack && !ctx->listStats[mDpy].secureUI) {
Saurabh Shah2d998a92013-05-14 17:55:58 -0700547 ALOGD_IF(isDebug(), "%s: Idle fallback dpy %d",__FUNCTION__, mDpy);
548 return false;
549 }
550
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800551 if(isSkipPresent(ctx, mDpy)) {
Saurabh Shahaa236822013-04-24 18:07:26 -0700552 ALOGD_IF(isDebug(),"%s: SKIP present: %d",
553 __FUNCTION__,
554 isSkipPresent(ctx, mDpy));
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800555 return false;
556 }
557
Arun Kumar K.R2e2871c2014-01-10 12:47:06 -0800558 if(mDpy > HWC_DISPLAY_PRIMARY && (priDispW > MAX_DISPLAY_DIM) &&
559 (ctx->dpyAttr[mDpy].xres < MAX_DISPLAY_DIM)) {
560 // Disable MDP comp on Secondary when the primary is highres panel and
561 // the secondary is a normal 1080p, because, MDP comp on secondary under
562 // in such usecase, decimation gets used for downscale and there will be
563 // a quality mismatch when there will be a fallback to GPU comp
564 ALOGD_IF(isDebug(), "%s: Disable MDP Compositon for Secondary Disp",
565 __FUNCTION__);
566 return false;
567 }
568
Ramkumar Radhakrishnan4af1ef02013-12-12 11:53:08 -0800569 // check for action safe flag and downscale mode which requires scaling.
570 if(ctx->dpyAttr[mDpy].mActionSafePresent
571 || ctx->dpyAttr[mDpy].mDownScaleMode) {
572 ALOGD_IF(isDebug(), "%s: Scaling needed for this frame",__FUNCTION__);
573 return false;
574 }
575
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800576 for(int i = 0; i < numAppLayers; ++i) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800577 hwc_layer_1_t* layer = &list->hwLayers[i];
578 private_handle_t *hnd = (private_handle_t *)layer->handle;
Jeykumar Sankarana37fdbf2013-03-06 18:59:28 -0800579
Saurabh Shahe9bc60f2013-08-29 12:58:06 -0700580 if(isYuvBuffer(hnd) && has90Transform(layer)) {
581 if(!canUseRotator(ctx, mDpy)) {
582 ALOGD_IF(isDebug(), "%s: Can't use rotator for dpy %d",
583 __FUNCTION__, mDpy);
Amara Venkata Mastan Manoj Kumar9d373c02013-08-20 14:30:09 -0700584 return false;
585 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800586 }
Prabhanjan Kandula9fb032a2013-06-18 17:37:22 +0530587
588 //For 8x26 with panel width>1k, if RGB layer needs HFLIP fail mdp comp
589 // may not need it if Gfx pre-rotation can handle all flips & rotations
590 if(qdutils::MDPVersion::getInstance().is8x26() &&
591 (ctx->dpyAttr[mDpy].xres > 1024) &&
592 (layer->transform & HWC_TRANSFORM_FLIP_H) &&
593 (!isYuvBuffer(hnd)))
594 return false;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800595 }
Saurabh Shahaa236822013-04-24 18:07:26 -0700596
Saurabh Shaha9da08f2013-07-03 13:27:53 -0700597 if(ctx->mAD->isDoable()) {
598 return false;
599 }
600
Saurabh Shahaa236822013-04-24 18:07:26 -0700601 //If all above hard conditions are met we can do full or partial MDP comp.
602 bool ret = false;
603 if(fullMDPComp(ctx, list)) {
604 ret = true;
Jeykumar Sankaran24c199d2013-05-24 09:40:36 -0700605 } else if(partialMDPComp(ctx, list)) {
Saurabh Shahaa236822013-04-24 18:07:26 -0700606 ret = true;
607 }
Prabhanjan Kandula21918db2013-11-26 15:51:58 +0530608
Saurabh Shahaa236822013-04-24 18:07:26 -0700609 return ret;
610}
611
612bool MDPComp::fullMDPComp(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Saurabh Shahe9bc60f2013-08-29 12:58:06 -0700613 //Will benefit presentation / secondary-only layer.
614 if((mDpy > HWC_DISPLAY_PRIMARY) &&
615 (list->numHwLayers - 1) > MAX_SEC_LAYERS) {
616 ALOGD_IF(isDebug(), "%s: Exceeds max secondary pipes",__FUNCTION__);
617 return false;
618 }
619
620 const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
621 for(int i = 0; i < numAppLayers; i++) {
622 hwc_layer_1_t* layer = &list->hwLayers[i];
623 if(not isSupportedForMDPComp(ctx, layer)) {
624 ALOGD_IF(isDebug(), "%s: Unsupported layer in list",__FUNCTION__);
625 return false;
626 }
Yang Xu9c1eb2b2013-11-26 01:28:13 +0800627
628 //For 8x26, if there is only one layer which needs scale for secondary
629 //while no scale for primary display, DMA pipe is occupied by primary.
630 //If need to fall back to GLES composition, virtual display lacks DMA
631 //pipe and error is reported.
632 if(qdutils::MDPVersion::getInstance().is8x26() &&
633 mDpy >= HWC_DISPLAY_EXTERNAL &&
Prabhanjan Kandula21918db2013-11-26 15:51:58 +0530634 qhwc::needsScaling(layer))
Yang Xu9c1eb2b2013-11-26 01:28:13 +0800635 return false;
Saurabh Shahe9bc60f2013-08-29 12:58:06 -0700636 }
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800637
Saurabh Shahaa236822013-04-24 18:07:26 -0700638 mCurrentFrame.fbCount = 0;
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700639 memcpy(&mCurrentFrame.isFBComposed, &mCurrentFrame.drop,
640 sizeof(mCurrentFrame.isFBComposed));
641 mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount -
642 mCurrentFrame.dropCount;
Saurabh Shahaa236822013-04-24 18:07:26 -0700643
radhakrishnac9a67412013-09-25 17:40:42 +0530644 if(sEnable4k2kYUVSplit){
Saurabh Shah3d4b8042013-12-10 15:19:17 -0800645 adjustForSourceSplit(ctx, list);
radhakrishnac9a67412013-09-25 17:40:42 +0530646 }
647
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800648 if(!postHeuristicsHandling(ctx, list)) {
649 ALOGD_IF(isDebug(), "post heuristic handling failed");
650 reset(ctx);
Saurabh Shah8c5c8522013-08-29 17:32:49 -0700651 return false;
652 }
653
Saurabh Shahaa236822013-04-24 18:07:26 -0700654 return true;
655}
656
657bool MDPComp::partialMDPComp(hwc_context_t *ctx, hwc_display_contents_1_t* list)
658{
Jeykumar Sankaran24c199d2013-05-24 09:40:36 -0700659 if(!sEnableMixedMode) {
660 //Mixed mode is disabled. No need to even try caching.
661 return false;
662 }
663
Saurabh Shah8028e3b2013-10-15 12:27:59 -0700664 bool ret = false;
Saurabh Shahf2de00f2013-12-11 17:52:53 -0800665 if(list->flags & HWC_GEOMETRY_CHANGED) { //Try load based first
666 ret = loadBasedCompPreferGPU(ctx, list) or
667 loadBasedCompPreferMDP(ctx, list) or
668 cacheBasedComp(ctx, list);
669 } else {
670 ret = cacheBasedComp(ctx, list) or
671 loadBasedCompPreferGPU(ctx, list) or
Saurabh Shahb772ae32013-11-18 15:40:02 -0800672 loadBasedCompPreferMDP(ctx, list);
Saurabh Shah8028e3b2013-10-15 12:27:59 -0700673 }
674
Saurabh Shah8028e3b2013-10-15 12:27:59 -0700675 return ret;
676}
677
678bool MDPComp::cacheBasedComp(hwc_context_t *ctx,
679 hwc_display_contents_1_t* list) {
680 int numAppLayers = ctx->listStats[mDpy].numAppLayers;
Saurabh Shahaa236822013-04-24 18:07:26 -0700681 mCurrentFrame.reset(numAppLayers);
682 updateLayerCache(ctx, list);
Saurabh Shahe9bc60f2013-08-29 12:58:06 -0700683
684 //If an MDP marked layer is unsupported cannot do partial MDP Comp
685 for(int i = 0; i < numAppLayers; i++) {
686 if(!mCurrentFrame.isFBComposed[i]) {
687 hwc_layer_1_t* layer = &list->hwLayers[i];
688 if(not isSupportedForMDPComp(ctx, layer)) {
689 ALOGD_IF(isDebug(), "%s: Unsupported layer in list",
690 __FUNCTION__);
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800691 reset(ctx);
Saurabh Shahe9bc60f2013-08-29 12:58:06 -0700692 return false;
693 }
694 }
695 }
696
Saurabh Shah90b7b9b2013-09-12 16:36:08 -0700697 updateYUV(ctx, list, false /*secure only*/);
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +0530698 bool ret = markLayersForCaching(ctx, list); //sets up fbZ also
Saurabh Shahe9bc60f2013-08-29 12:58:06 -0700699 if(!ret) {
700 ALOGD_IF(isDebug(),"%s: batching failed, dpy %d",__FUNCTION__, mDpy);
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800701 reset(ctx);
Saurabh Shahe9bc60f2013-08-29 12:58:06 -0700702 return false;
703 }
Saurabh Shahaa236822013-04-24 18:07:26 -0700704
705 int mdpCount = mCurrentFrame.mdpCount;
Saurabh Shahe9bc60f2013-08-29 12:58:06 -0700706
radhakrishnac9a67412013-09-25 17:40:42 +0530707 if(sEnable4k2kYUVSplit){
Saurabh Shah3d4b8042013-12-10 15:19:17 -0800708 adjustForSourceSplit(ctx, list);
radhakrishnac9a67412013-09-25 17:40:42 +0530709 }
710
Saurabh Shahe9bc60f2013-08-29 12:58:06 -0700711 //Will benefit cases where a video has non-updating background.
712 if((mDpy > HWC_DISPLAY_PRIMARY) and
713 (mdpCount > MAX_SEC_LAYERS)) {
714 ALOGD_IF(isDebug(), "%s: Exceeds max secondary pipes",__FUNCTION__);
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800715 reset(ctx);
Saurabh Shahe9bc60f2013-08-29 12:58:06 -0700716 return false;
717 }
718
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800719 if(!postHeuristicsHandling(ctx, list)) {
720 ALOGD_IF(isDebug(), "post heuristic handling failed");
721 reset(ctx);
Saurabh Shah8c5c8522013-08-29 17:32:49 -0700722 return false;
723 }
724
Saurabh Shahaa236822013-04-24 18:07:26 -0700725 return true;
726}
727
Saurabh Shahb772ae32013-11-18 15:40:02 -0800728bool MDPComp::loadBasedCompPreferGPU(hwc_context_t *ctx,
Saurabh Shah8028e3b2013-10-15 12:27:59 -0700729 hwc_display_contents_1_t* list) {
Arun Kumar K.R2aa44c62014-01-21 23:08:28 -0800730 if(not isLoadBasedCompDoable(ctx)) {
Saurabh Shahf2de00f2013-12-11 17:52:53 -0800731 return false;
732 }
733
Saurabh Shah8028e3b2013-10-15 12:27:59 -0700734 int numAppLayers = ctx->listStats[mDpy].numAppLayers;
735 mCurrentFrame.reset(numAppLayers);
736
Saurabh Shahb772ae32013-11-18 15:40:02 -0800737 int stagesForMDP = min(sMaxPipesPerMixer, ctx->mOverlay->availablePipes(
738 mDpy, Overlay::MIXER_DEFAULT));
739 //If MDP has X possible stages, it can take X layers.
Jeykumar Sankaran846e2792014-01-23 21:59:58 -0800740 const int batchSize = (numAppLayers - mCurrentFrame.dropCount) -
741 (stagesForMDP - 1); //1 for FB
Saurabh Shahb772ae32013-11-18 15:40:02 -0800742
Saurabh Shah8028e3b2013-10-15 12:27:59 -0700743 if(batchSize <= 0) {
744 ALOGD_IF(isDebug(), "%s: Not attempting", __FUNCTION__);
745 return false;
746 }
747
748 int minBatchStart = -1;
Jeykumar Sankaran846e2792014-01-23 21:59:58 -0800749 int minBatchEnd = -1;
Saurabh Shah8028e3b2013-10-15 12:27:59 -0700750 size_t minBatchPixelCount = SIZE_MAX;
751
Jeykumar Sankaran846e2792014-01-23 21:59:58 -0800752 /* Iterate through the layer list to find out a contigous batch of batchSize
753 * non-dropped layers with loweest pixel count */
754 for(int i = 0; i <= (numAppLayers - batchSize); i++) {
755 if(mCurrentFrame.drop[i])
756 continue;
757
758 int batchCount = batchSize;
Saurabh Shah8028e3b2013-10-15 12:27:59 -0700759 uint32_t batchPixelCount = 0;
Jeykumar Sankaran846e2792014-01-23 21:59:58 -0800760 int j = i;
761 for(; j < numAppLayers && batchCount; j++){
762 if(!mCurrentFrame.drop[j]) {
763 hwc_layer_1_t* layer = &list->hwLayers[j];
764 hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
765 hwc_rect_t dst = layer->displayFrame;
766
767 /* If we have a valid ROI, count pixels only for the MDP fetched
768 * region of the buffer */
769 if((ctx->listStats[mDpy].roi.w != ctx->dpyAttr[mDpy].xres) ||
770 (ctx->listStats[mDpy].roi.h != ctx->dpyAttr[mDpy].yres)) {
771 hwc_rect_t roi;
772 roi.left = ctx->listStats[mDpy].roi.x;
773 roi.top = ctx->listStats[mDpy].roi.y;
774 roi.right = roi.left + ctx->listStats[mDpy].roi.w;
775 roi.bottom = roi.top + ctx->listStats[mDpy].roi.h;
776
777 /* valid ROI means no scaling layer is composed. So check
778 * only intersection to find actual fetched pixels */
779 crop = getIntersection(roi, dst);
780 }
781
782 batchPixelCount += (crop.right - crop.left) *
Saurabh Shah8028e3b2013-10-15 12:27:59 -0700783 (crop.bottom - crop.top);
Jeykumar Sankaran846e2792014-01-23 21:59:58 -0800784 batchCount--;
785 }
Saurabh Shah8028e3b2013-10-15 12:27:59 -0700786 }
787
Jeykumar Sankaran846e2792014-01-23 21:59:58 -0800788 /* we dont want to program any batch of size lesser than batchSize */
789 if(!batchCount && (batchPixelCount < minBatchPixelCount)) {
Saurabh Shah8028e3b2013-10-15 12:27:59 -0700790 minBatchPixelCount = batchPixelCount;
791 minBatchStart = i;
Jeykumar Sankaran846e2792014-01-23 21:59:58 -0800792 minBatchEnd = j-1;
Saurabh Shah8028e3b2013-10-15 12:27:59 -0700793 }
794 }
795
796 if(minBatchStart < 0) {
797 ALOGD_IF(isDebug(), "%s: No batch found batchSize %d numAppLayers %d",
798 __FUNCTION__, batchSize, numAppLayers);
799 return false;
800 }
801
Jeykumar Sankaran846e2792014-01-23 21:59:58 -0800802 /* non-dropped layers falling ouside the selected batch will be marked for
803 * MDP */
Saurabh Shah8028e3b2013-10-15 12:27:59 -0700804 for(int i = 0; i < numAppLayers; i++) {
Jeykumar Sankaran846e2792014-01-23 21:59:58 -0800805 if((i < minBatchStart || i > minBatchEnd) && !mCurrentFrame.drop[i] ) {
Saurabh Shah8028e3b2013-10-15 12:27:59 -0700806 hwc_layer_1_t* layer = &list->hwLayers[i];
807 if(not isSupportedForMDPComp(ctx, layer)) {
808 ALOGD_IF(isDebug(), "%s: MDP unsupported layer found at %d",
809 __FUNCTION__, i);
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800810 reset(ctx);
Saurabh Shah8028e3b2013-10-15 12:27:59 -0700811 return false;
812 }
813 mCurrentFrame.isFBComposed[i] = false;
814 }
815 }
816
817 mCurrentFrame.fbZ = minBatchStart;
818 mCurrentFrame.fbCount = batchSize;
Jeykumar Sankaran846e2792014-01-23 21:59:58 -0800819 mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount -
820 mCurrentFrame.dropCount;
Saurabh Shah8028e3b2013-10-15 12:27:59 -0700821
Jeykumar Sankaran846e2792014-01-23 21:59:58 -0800822 ALOGD_IF(isDebug(), "%s: fbZ %d batchSize %d fbStart: %d fbEnd: %d",
823 __FUNCTION__, mCurrentFrame.fbZ, batchSize, minBatchStart,
824 minBatchEnd);
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800825
radhakrishnac9a67412013-09-25 17:40:42 +0530826 if(sEnable4k2kYUVSplit){
Saurabh Shah3d4b8042013-12-10 15:19:17 -0800827 adjustForSourceSplit(ctx, list);
radhakrishnac9a67412013-09-25 17:40:42 +0530828 }
829
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800830 if(!postHeuristicsHandling(ctx, list)) {
831 ALOGD_IF(isDebug(), "post heuristic handling failed");
832 reset(ctx);
Saurabh Shah8028e3b2013-10-15 12:27:59 -0700833 return false;
834 }
835
Saurabh Shah8028e3b2013-10-15 12:27:59 -0700836 return true;
837}
838
Saurabh Shahb772ae32013-11-18 15:40:02 -0800839bool MDPComp::loadBasedCompPreferMDP(hwc_context_t *ctx,
840 hwc_display_contents_1_t* list) {
Arun Kumar K.R2aa44c62014-01-21 23:08:28 -0800841 if(not isLoadBasedCompDoable(ctx)) {
Saurabh Shahf2de00f2013-12-11 17:52:53 -0800842 return false;
843 }
844
Saurabh Shahb772ae32013-11-18 15:40:02 -0800845 const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
Saurabh Shahf2de00f2013-12-11 17:52:53 -0800846 mCurrentFrame.reset(numAppLayers);
847
Saurabh Shahb772ae32013-11-18 15:40:02 -0800848 //Full screen is from ib perspective, not actual full screen
849 const int bpp = 4;
850 double panelRefRate =
851 1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;
852
853 double bwLeft = sMaxBw - sBwClaimed;
854
855 const int fullScreenLayers = bwLeft * 1000000000 / (ctx->dpyAttr[mDpy].xres
856 * ctx->dpyAttr[mDpy].yres * bpp * panelRefRate);
857
Jeykumar Sankaran846e2792014-01-23 21:59:58 -0800858 const int fbBatchSize = (numAppLayers - mCurrentFrame.dropCount)
859 - (fullScreenLayers - 1);
860
Saurabh Shahb772ae32013-11-18 15:40:02 -0800861 //If batch size is not at least 2, we aren't really preferring MDP, since
862 //only 1 layer going to GPU could actually translate into an entire FB
863 //needed to be fetched by MDP, thus needing more b/w rather than less.
864 if(fbBatchSize < 2 || fbBatchSize > numAppLayers) {
865 ALOGD_IF(isDebug(), "%s: Not attempting", __FUNCTION__);
866 return false;
867 }
868
Jeykumar Sankaran846e2792014-01-23 21:59:58 -0800869 //Find top fbBatchSize non-dropped layers to get your batch
870 int fbStart = -1, fbEnd = -1, batchCount = fbBatchSize;
871 for(int i = numAppLayers - 1; i >= 0; i--) {
872 if(mCurrentFrame.drop[i])
873 continue;
Saurabh Shahb772ae32013-11-18 15:40:02 -0800874
Jeykumar Sankaran846e2792014-01-23 21:59:58 -0800875 if(fbEnd < 0)
876 fbEnd = i;
877
878 if(!(--batchCount)) {
879 fbStart = i;
880 break;
Saurabh Shahb772ae32013-11-18 15:40:02 -0800881 }
Saurabh Shahb772ae32013-11-18 15:40:02 -0800882 }
883
Jeykumar Sankaran846e2792014-01-23 21:59:58 -0800884 //Bottom layers constitute MDP batch
885 for(int i = 0; i < fbStart; i++) {
886 if((i < fbStart || i > fbEnd) && !mCurrentFrame.drop[i] ) {
887 hwc_layer_1_t* layer = &list->hwLayers[i];
888 if(not isSupportedForMDPComp(ctx, layer)) {
889 ALOGD_IF(isDebug(), "%s: MDP unsupported layer found at %d",
890 __FUNCTION__, i);
891 reset(ctx);
892 return false;
893 }
894 mCurrentFrame.isFBComposed[i] = false;
895 }
896 }
Saurabh Shahb772ae32013-11-18 15:40:02 -0800897
Jeykumar Sankaran846e2792014-01-23 21:59:58 -0800898 mCurrentFrame.fbZ = fbStart;
899 mCurrentFrame.fbCount = fbBatchSize;
900 mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount
901 - mCurrentFrame.dropCount;
902
903 ALOGD_IF(isDebug(), "%s: FB Z %d, app layers %d, non-dropped layers: %d, "
904 "MDP Batch Size %d",__FUNCTION__, mCurrentFrame.fbZ, numAppLayers,
905 numAppLayers - mCurrentFrame.dropCount, mCurrentFrame.mdpCount);
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800906
radhakrishnac9a67412013-09-25 17:40:42 +0530907 if(sEnable4k2kYUVSplit){
Saurabh Shah3d4b8042013-12-10 15:19:17 -0800908 adjustForSourceSplit(ctx, list);
radhakrishnac9a67412013-09-25 17:40:42 +0530909 }
910
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800911 if(!postHeuristicsHandling(ctx, list)) {
912 ALOGD_IF(isDebug(), "post heuristic handling failed");
913 reset(ctx);
Saurabh Shahb772ae32013-11-18 15:40:02 -0800914 return false;
915 }
916
Saurabh Shahb772ae32013-11-18 15:40:02 -0800917 return true;
918}
919
Arun Kumar K.R2aa44c62014-01-21 23:08:28 -0800920bool MDPComp::isLoadBasedCompDoable(hwc_context_t *ctx) {
Prabhanjan Kandula3dbbd882013-12-11 14:43:46 +0530921 if(mDpy or isSecurePresent(ctx, mDpy) or
922 isYuvPresent(ctx, mDpy)) {
Saurabh Shah8028e3b2013-10-15 12:27:59 -0700923 return false;
924 }
925 return true;
926}
927
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800928bool MDPComp::tryVideoOnly(hwc_context_t *ctx,
929 hwc_display_contents_1_t* list) {
930 const bool secureOnly = true;
931 return videoOnlyComp(ctx, list, not secureOnly) or
932 videoOnlyComp(ctx, list, secureOnly);
933}
934
935bool MDPComp::videoOnlyComp(hwc_context_t *ctx,
Saurabh Shah90b7b9b2013-09-12 16:36:08 -0700936 hwc_display_contents_1_t* list, bool secureOnly) {
Saurabh Shahaa236822013-04-24 18:07:26 -0700937 int numAppLayers = ctx->listStats[mDpy].numAppLayers;
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700938
Saurabh Shahaa236822013-04-24 18:07:26 -0700939 mCurrentFrame.reset(numAppLayers);
Saurabh Shah90b7b9b2013-09-12 16:36:08 -0700940 updateYUV(ctx, list, secureOnly);
Saurabh Shah4fdde762013-04-30 18:47:33 -0700941 int mdpCount = mCurrentFrame.mdpCount;
Saurabh Shahaa236822013-04-24 18:07:26 -0700942
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800943 if(!isYuvPresent(ctx, mDpy) or (mdpCount == 0)) {
944 reset(ctx);
Saurabh Shahaa236822013-04-24 18:07:26 -0700945 return false;
946 }
947
Jeykumar Sankaranf42f0d82013-11-08 18:09:20 -0800948 /* Bail out if we are processing only secured video layers
949 * and we dont have any */
950 if(!isSecurePresent(ctx, mDpy) && secureOnly){
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800951 reset(ctx);
Jeykumar Sankaranf42f0d82013-11-08 18:09:20 -0800952 return false;
953 }
954
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800955 if(mCurrentFrame.fbCount)
956 mCurrentFrame.fbZ = mCurrentFrame.mdpCount;
Saurabh Shah4fdde762013-04-30 18:47:33 -0700957
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800958 if(sEnable4k2kYUVSplit){
959 adjustForSourceSplit(ctx, list);
960 }
961
962 if(!postHeuristicsHandling(ctx, list)) {
963 ALOGD_IF(isDebug(), "post heuristic handling failed");
964 reset(ctx);
Saurabh Shah8c5c8522013-08-29 17:32:49 -0700965 return false;
966 }
967
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800968 return true;
969}
970
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800971/* Checks for conditions where YUV layers cannot be bypassed */
972bool MDPComp::isYUVDoable(hwc_context_t* ctx, hwc_layer_1_t* layer) {
Ramkumar Radhakrishnana70981a2013-08-28 11:33:53 -0700973 if(isSkipLayer(layer)) {
Saurabh Shahe2474082013-05-15 16:32:13 -0700974 ALOGD_IF(isDebug(), "%s: Video marked SKIP dpy %d", __FUNCTION__, mDpy);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800975 return false;
976 }
977
Amara Venkata Mastan Manoj Kumar9d373c02013-08-20 14:30:09 -0700978 if(layer->transform & HWC_TRANSFORM_ROT_90 && !canUseRotator(ctx,mDpy)) {
979 ALOGD_IF(isDebug(), "%s: no free DMA pipe",__FUNCTION__);
980 return false;
981 }
982
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800983 if(isSecuring(ctx, layer)) {
984 ALOGD_IF(isDebug(), "%s: MDP securing is active", __FUNCTION__);
985 return false;
986 }
987
Saurabh Shah4fdde762013-04-30 18:47:33 -0700988 if(!isValidDimension(ctx, layer)) {
989 ALOGD_IF(isDebug(), "%s: Buffer is of invalid width",
990 __FUNCTION__);
991 return false;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800992 }
Saurabh Shah4fdde762013-04-30 18:47:33 -0700993
Naseer Ahmeddc61a972013-07-10 17:50:54 -0400994 if(layer->planeAlpha < 0xFF) {
995 ALOGD_IF(isDebug(), "%s: Cannot handle YUV layer with plane alpha\
996 in video only mode",
997 __FUNCTION__);
998 return false;
999 }
1000
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001001 return true;
1002}
1003
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301004/* starts at fromIndex and check for each layer to find
1005 * if it it has overlapping with any Updating layer above it in zorder
1006 * till the end of the batch. returns true if it finds any intersection */
1007bool MDPComp::canPushBatchToTop(const hwc_display_contents_1_t* list,
1008 int fromIndex, int toIndex) {
1009 for(int i = fromIndex; i < toIndex; i++) {
1010 if(mCurrentFrame.isFBComposed[i] && !mCurrentFrame.drop[i]) {
1011 if(intersectingUpdatingLayers(list, i+1, toIndex, i)) {
1012 return false;
1013 }
1014 }
1015 }
1016 return true;
1017}
1018
1019/* Checks if given layer at targetLayerIndex has any
1020 * intersection with all the updating layers in beween
1021 * fromIndex and toIndex. Returns true if it finds intersectiion */
1022bool MDPComp::intersectingUpdatingLayers(const hwc_display_contents_1_t* list,
1023 int fromIndex, int toIndex, int targetLayerIndex) {
1024 for(int i = fromIndex; i <= toIndex; i++) {
1025 if(!mCurrentFrame.isFBComposed[i]) {
1026 if(areLayersIntersecting(&list->hwLayers[i],
1027 &list->hwLayers[targetLayerIndex])) {
1028 return true;
1029 }
1030 }
1031 }
1032 return false;
1033}
1034
1035int MDPComp::getBatch(hwc_display_contents_1_t* list,
1036 int& maxBatchStart, int& maxBatchEnd,
1037 int& maxBatchCount) {
1038 int i = 0;
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301039 int fbZOrder =-1;
Jeykumar Sankaran9502f352014-01-20 12:25:32 -08001040 int droppedLayerCt = 0;
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301041 while (i < mCurrentFrame.layerCount) {
1042 int batchCount = 0;
1043 int batchStart = i;
1044 int batchEnd = i;
Jeykumar Sankaran9502f352014-01-20 12:25:32 -08001045 /* Adjust batch Z order with the dropped layers so far */
1046 int fbZ = batchStart - droppedLayerCt;
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301047 int firstZReverseIndex = -1;
Prabhanjan Kandula0ed2cc92013-12-06 12:39:04 +05301048 int updatingLayersAbove = 0;//Updating layer count in middle of batch
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301049 while(i < mCurrentFrame.layerCount) {
1050 if(!mCurrentFrame.isFBComposed[i]) {
1051 if(!batchCount) {
1052 i++;
1053 break;
1054 }
1055 updatingLayersAbove++;
1056 i++;
1057 continue;
1058 } else {
1059 if(mCurrentFrame.drop[i]) {
1060 i++;
Jeykumar Sankaran9502f352014-01-20 12:25:32 -08001061 droppedLayerCt++;
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301062 continue;
1063 } else if(updatingLayersAbove <= 0) {
1064 batchCount++;
1065 batchEnd = i;
1066 i++;
1067 continue;
1068 } else { //Layer is FBComposed, not a drop & updatingLayer > 0
1069
1070 // We have a valid updating layer already. If layer-i not
1071 // have overlapping with all updating layers in between
1072 // batch-start and i, then we can add layer i to batch.
1073 if(!intersectingUpdatingLayers(list, batchStart, i-1, i)) {
1074 batchCount++;
1075 batchEnd = i;
1076 i++;
1077 continue;
1078 } else if(canPushBatchToTop(list, batchStart, i)) {
1079 //If All the non-updating layers with in this batch
1080 //does not have intersection with the updating layers
1081 //above in z-order, then we can safely move the batch to
1082 //higher z-order. Increment fbZ as it is moving up.
1083 if( firstZReverseIndex < 0) {
1084 firstZReverseIndex = i;
1085 }
1086 batchCount++;
1087 batchEnd = i;
1088 fbZ += updatingLayersAbove;
1089 i++;
1090 updatingLayersAbove = 0;
1091 continue;
1092 } else {
1093 //both failed.start the loop again from here.
1094 if(firstZReverseIndex >= 0) {
1095 i = firstZReverseIndex;
1096 }
1097 break;
1098 }
1099 }
1100 }
1101 }
1102 if(batchCount > maxBatchCount) {
1103 maxBatchCount = batchCount;
1104 maxBatchStart = batchStart;
1105 maxBatchEnd = batchEnd;
1106 fbZOrder = fbZ;
1107 }
1108 }
1109 return fbZOrder;
1110}
1111
1112bool MDPComp::markLayersForCaching(hwc_context_t* ctx,
1113 hwc_display_contents_1_t* list) {
1114 /* Idea is to keep as many non-updating(cached) layers in FB and
1115 * send rest of them through MDP. This is done in 2 steps.
1116 * 1. Find the maximum contiguous batch of non-updating layers.
1117 * 2. See if we can improve this batch size for caching by adding
1118 * opaque layers around the batch, if they don't have
1119 * any overlapping with the updating layers in between.
1120 * NEVER mark an updating layer for caching.
1121 * But cached ones can be marked for MDP */
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001122
1123 int maxBatchStart = -1;
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001124 int maxBatchEnd = -1;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001125 int maxBatchCount = 0;
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301126 int fbZ = -1;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001127
Saurabh Shahd53bc5f2014-02-05 10:17:43 -08001128 /* Nothing is cached. No batching needed */
1129 if(mCurrentFrame.fbCount == 0) {
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001130 return true;
Saurabh Shahaa236822013-04-24 18:07:26 -07001131 }
Saurabh Shahd53bc5f2014-02-05 10:17:43 -08001132
1133 /* No MDP comp layers, try to use other comp modes */
1134 if(mCurrentFrame.mdpCount == 0) {
1135 return false;
Saurabh Shahaa236822013-04-24 18:07:26 -07001136 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001137
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301138 fbZ = getBatch(list, maxBatchStart, maxBatchEnd, maxBatchCount);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001139
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301140 /* reset rest of the layers lying inside ROI for MDP comp */
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001141 for(int i = 0; i < mCurrentFrame.layerCount; i++) {
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001142 hwc_layer_1_t* layer = &list->hwLayers[i];
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001143 if((i < maxBatchStart || i > maxBatchEnd) &&
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301144 mCurrentFrame.isFBComposed[i]){
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001145 if(!mCurrentFrame.drop[i]){
1146 //If an unsupported layer is being attempted to
1147 //be pulled out we should fail
1148 if(not isSupportedForMDPComp(ctx, layer)) {
1149 return false;
1150 }
1151 mCurrentFrame.isFBComposed[i] = false;
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001152 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001153 }
1154 }
1155
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301156 // update the frame data
1157 mCurrentFrame.fbZ = fbZ;
1158 mCurrentFrame.fbCount = maxBatchCount;
Saurabh Shahaa236822013-04-24 18:07:26 -07001159 mCurrentFrame.mdpCount = mCurrentFrame.layerCount -
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001160 mCurrentFrame.fbCount - mCurrentFrame.dropCount;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001161
1162 ALOGD_IF(isDebug(),"%s: cached count: %d",__FUNCTION__,
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301163 mCurrentFrame.fbCount);
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001164
1165 return true;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001166}
Saurabh Shah85234ec2013-04-12 17:09:00 -07001167
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001168void MDPComp::updateLayerCache(hwc_context_t* ctx,
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001169 hwc_display_contents_1_t* list) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001170 int numAppLayers = ctx->listStats[mDpy].numAppLayers;
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001171 int fbCount = 0;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001172
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001173 for(int i = 0; i < numAppLayers; i++) {
1174 if (mCachedFrame.hnd[i] == list->hwLayers[i].handle) {
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001175 if(!mCurrentFrame.drop[i])
1176 fbCount++;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001177 mCurrentFrame.isFBComposed[i] = true;
1178 } else {
Saurabh Shahaa236822013-04-24 18:07:26 -07001179 mCurrentFrame.isFBComposed[i] = false;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001180 }
1181 }
Saurabh Shahaa236822013-04-24 18:07:26 -07001182
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001183 mCurrentFrame.fbCount = fbCount;
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001184 mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount
1185 - mCurrentFrame.dropCount;
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001186
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001187 ALOGD_IF(isDebug(),"%s: MDP count: %d FB count %d drop count: %d"
1188 ,__FUNCTION__, mCurrentFrame.mdpCount, mCurrentFrame.fbCount,
1189 mCurrentFrame.dropCount);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001190}
1191
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001192void MDPComp::updateYUV(hwc_context_t* ctx, hwc_display_contents_1_t* list,
1193 bool secureOnly) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001194 int nYuvCount = ctx->listStats[mDpy].yuvCount;
1195 for(int index = 0;index < nYuvCount; index++){
1196 int nYuvIndex = ctx->listStats[mDpy].yuvIndices[index];
1197 hwc_layer_1_t* layer = &list->hwLayers[nYuvIndex];
1198
1199 if(!isYUVDoable(ctx, layer)) {
1200 if(!mCurrentFrame.isFBComposed[nYuvIndex]) {
1201 mCurrentFrame.isFBComposed[nYuvIndex] = true;
1202 mCurrentFrame.fbCount++;
1203 }
1204 } else {
1205 if(mCurrentFrame.isFBComposed[nYuvIndex]) {
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001206 private_handle_t *hnd = (private_handle_t *)layer->handle;
1207 if(!secureOnly || isSecureBuffer(hnd)) {
1208 mCurrentFrame.isFBComposed[nYuvIndex] = false;
1209 mCurrentFrame.fbCount--;
1210 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001211 }
1212 }
1213 }
Saurabh Shahaa236822013-04-24 18:07:26 -07001214
1215 mCurrentFrame.mdpCount = mCurrentFrame.layerCount -
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001216 mCurrentFrame.fbCount - mCurrentFrame.dropCount;
1217 ALOGD_IF(isDebug(),"%s: fb count: %d",__FUNCTION__,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001218 mCurrentFrame.fbCount);
1219}
1220
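/*
 * Returns the union of the display frames of all FB-composed, non-dropped
 * layers, so the FB update can be restricted to just that region.
 */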
Jeykumar Sankaranc2d78d82014-02-14 14:55:29 -08001221hwc_rect_t MDPComp::getUpdatingFBRect(hwc_context_t *ctx,
1222 hwc_display_contents_1_t* list){
1223 hwc_rect_t fbRect = (struct hwc_rect){0, 0, 0, 0};
1224 hwc_layer_1_t *fbLayer = &list->hwLayers[mCurrentFrame.layerCount];
1225
1226 /* Update only the region of FB needed for composition */
1227 for(int i = 0; i < mCurrentFrame.layerCount; i++ ) {
1228 if(mCurrentFrame.isFBComposed[i] && !mCurrentFrame.drop[i]) {
1229 hwc_layer_1_t* layer = &list->hwLayers[i];
1230 hwc_rect_t dst = layer->displayFrame;
1231 fbRect = getUnion(fbRect, dst);
1232 }
1233 }
1234 return fbRect;
1235}
1236
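/*
 * Runs after a composition strategy has been chosen: verifies pipe
 * availability and hardware limits, prepares the FB target at fbZ over the
 * updating region, allocates MDP pipes, assigns z-orders (a source-split
 * 4k2k YUV layer consumes an extra z-order for its right half), configures
 * each MDP layer, and finally validates the overlay state with the driver.
 */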
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001237bool MDPComp::postHeuristicsHandling(hwc_context_t *ctx,
1238 hwc_display_contents_1_t* list) {
1239
1240 //Capability checks
1241 if(!resourceCheck(ctx, list)) {
1242 ALOGD_IF(isDebug(), "%s: resource check failed", __FUNCTION__);
1243 return false;
1244 }
1245
1246 //Limitations checks
1247 if(!hwLimitationsCheck(ctx, list)) {
1248 ALOGD_IF(isDebug(), "%s: HW limitations",__FUNCTION__);
1249 return false;
1250 }
1251
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001252 //Configure framebuffer first if applicable
1253 if(mCurrentFrame.fbZ >= 0) {
Jeykumar Sankaranc2d78d82014-02-14 14:55:29 -08001254 hwc_rect_t fbRect = getUpdatingFBRect(ctx, list);
1255 if(!ctx->mFBUpdate[mDpy]->prepare(ctx, list, fbRect, mCurrentFrame.fbZ))
1256 {
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001257 ALOGD_IF(isDebug(), "%s configure framebuffer failed",
1258 __FUNCTION__);
1259 return false;
1260 }
1261 }
1262
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001263 mCurrentFrame.map();
1264
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001265 if(!allocLayerPipes(ctx, list)) {
1266 ALOGD_IF(isDebug(), "%s: Unable to allocate MDP pipes", __FUNCTION__);
Saurabh Shahaa236822013-04-24 18:07:26 -07001267 return false;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001268 }
1269
1270 for (int index = 0, mdpNextZOrder = 0; index < mCurrentFrame.layerCount;
Saurabh Shahaa236822013-04-24 18:07:26 -07001271 index++) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001272 if(!mCurrentFrame.isFBComposed[index]) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001273 int mdpIndex = mCurrentFrame.layerToMDP[index];
1274 hwc_layer_1_t* layer = &list->hwLayers[index];
1275
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301276 //Leave fbZ for framebuffer. CACHE/GLES layers go here.
1277 if(mdpNextZOrder == mCurrentFrame.fbZ) {
1278 mdpNextZOrder++;
1279 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001280 MdpPipeInfo* cur_pipe = mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
1281 cur_pipe->zOrder = mdpNextZOrder++;
1282
radhakrishnac9a67412013-09-25 17:40:42 +05301283 private_handle_t *hnd = (private_handle_t *)layer->handle;
1284 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1285 if(configure4k2kYuv(ctx, layer,
1286 mCurrentFrame.mdpToLayer[mdpIndex])
1287 != 0 ){
1288 ALOGD_IF(isDebug(), "%s: Failed to configure split pipes \
1289 for layer %d",__FUNCTION__, index);
1290 return false;
1291 }
1292 else{
1293 mdpNextZOrder++;
1294 }
1295 continue;
1296 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001297 if(configure(ctx, layer, mCurrentFrame.mdpToLayer[mdpIndex]) != 0 ){
1298 ALOGD_IF(isDebug(), "%s: Failed to configure overlay for \
radhakrishnac9a67412013-09-25 17:40:42 +05301299 layer %d",__FUNCTION__, index);
Saurabh Shahaa236822013-04-24 18:07:26 -07001300 return false;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001301 }
Saurabh Shahaa236822013-04-24 18:07:26 -07001302 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001303 }
1304
Saurabh Shaha36be922013-12-16 18:18:39 -08001305 if(!ctx->mOverlay->validateAndSet(mDpy, ctx->dpyAttr[mDpy].fd)) {
1306 ALOGD_IF(isDebug(), "%s: Failed to validate and set overlay for dpy %d"
1307 ,__FUNCTION__, mDpy);
1308 return false;
1309 }
1310
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001311 setRedraw(ctx, list);
Saurabh Shahaa236822013-04-24 18:07:26 -07001312 return true;
1313}
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001314
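/*
 * Checks that the chosen MDP layer count fits within the per-mixer pipe
 * budget, reserving one pipe for the FB target when FB composition is used.
 */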
Saurabh Shah173f4242013-11-20 09:50:12 -08001315bool MDPComp::resourceCheck(hwc_context_t *ctx,
1316 hwc_display_contents_1_t *list) {
1317 const bool fbUsed = mCurrentFrame.fbCount;
1318 if(mCurrentFrame.mdpCount > sMaxPipesPerMixer - fbUsed) {
1319 ALOGD_IF(isDebug(), "%s: Exceeds MAX_PIPES_PER_MIXER",__FUNCTION__);
1320 return false;
1321 }
Saurabh Shah173f4242013-11-20 09:50:12 -08001322 return true;
1323}
1324
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001325double MDPComp::calcMDPBytesRead(hwc_context_t *ctx,
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001326 hwc_display_contents_1_t* list) {
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001327 double size = 0;
1328 const double GIG = 1000000000.0;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001329
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001330 //Skip for targets where no device tree value for bw is supplied
1331 if(sMaxBw <= 0.0) {
1332 return 0.0;
1333 }
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001334
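    // Per-layer read estimate (in GB): source bytes-per-pixel * source crop
    // area, scaled by the vertical overfetch factor (panel yres / dst height).
    // Illustrative example with hypothetical numbers: a full-screen 1080x1920
    // RGBA layer (4 Bpp) on a 1920-line panel contributes roughly
    // 4 * 1080 * 1920 / 1e9 ~= 0.008 GB per frame. The FB layer, if used,
    // adds its full RGBA8888 buffer size. prepare() later multiplies this
    // total by the panel refresh rate to obtain GBps.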
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001335 for (uint32_t i = 0; i < list->numHwLayers - 1; i++) {
1336 if(!mCurrentFrame.isFBComposed[i]) {
1337 hwc_layer_1_t* layer = &list->hwLayers[i];
1338 private_handle_t *hnd = (private_handle_t *)layer->handle;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001339 if (hnd) {
Saurabh Shah62e1d732013-09-17 10:44:05 -07001340 hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
Saurabh Shah90789162013-09-16 10:29:20 -07001341 hwc_rect_t dst = layer->displayFrame;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001342 float bpp = ((float)hnd->size) / (hnd->width * hnd->height);
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001343 size += (bpp * (crop.right - crop.left) *
1344 (crop.bottom - crop.top) *
1345 ctx->dpyAttr[mDpy].yres / (dst.bottom - dst.top)) /
1346 GIG;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001347 }
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001348 }
1349 }
1350
1351 if(mCurrentFrame.fbCount) {
1352 hwc_layer_1_t* layer = &list->hwLayers[list->numHwLayers - 1];
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001353 int tempw, temph;
1354 size += (getBufferSizeAndDimensions(
1355 layer->displayFrame.right - layer->displayFrame.left,
1356 layer->displayFrame.bottom - layer->displayFrame.top,
1357 HAL_PIXEL_FORMAT_RGBA_8888,
1358 tempw, temph)) / GIG;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001359 }
1360
1361 return size;
1362}
1363
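/*
 * Hardware-specific fallbacks: pre-MDSS targets cannot alpha-scale on MDP,
 * and on 8x26/8974v2 two overlapping MDP layers that both need downscaling
 * cannot be blended, so such frames are sent back to GPU composition.
 */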
Prabhanjan Kandula21918db2013-11-26 15:51:58 +05301364bool MDPComp::hwLimitationsCheck(hwc_context_t* ctx,
1365 hwc_display_contents_1_t* list) {
1366
1367 //A-family hw limitation:
1368    //If a layer needs alpha scaling, MDP cannot support it.
1369 if(ctx->mMDP.version < qdutils::MDSS_V5) {
1370 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1371 if(!mCurrentFrame.isFBComposed[i] &&
1372 isAlphaScaled( &list->hwLayers[i])) {
1373 ALOGD_IF(isDebug(), "%s:frame needs alphaScaling",__FUNCTION__);
1374 return false;
1375 }
1376 }
1377 }
1378
1379 // On 8x26 & 8974 hw, we have a limitation of downscaling+blending.
1380    //If multiple layers require downscaling and they overlap,
1381    //fall back to GPU since MDSS cannot handle it.
1382 if(qdutils::MDPVersion::getInstance().is8x74v2() ||
1383 qdutils::MDPVersion::getInstance().is8x26()) {
1384 for(int i = 0; i < mCurrentFrame.layerCount-1; ++i) {
1385 hwc_layer_1_t* botLayer = &list->hwLayers[i];
1386 if(!mCurrentFrame.isFBComposed[i] &&
1387 isDownscaleRequired(botLayer)) {
1388 //if layer-i is marked for MDP and needs downscaling
1389 //check if any MDP layer on top of i & overlaps with layer-i
1390 for(int j = i+1; j < mCurrentFrame.layerCount; ++j) {
1391 hwc_layer_1_t* topLayer = &list->hwLayers[j];
1392 if(!mCurrentFrame.isFBComposed[j] &&
1393 isDownscaleRequired(topLayer)) {
1394 hwc_rect_t r = getIntersection(botLayer->displayFrame,
1395 topLayer->displayFrame);
1396 if(isValidRect(r))
1397 return false;
1398 }
1399 }
1400 }
1401 }
1402 }
1403 return true;
1404}
1405
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001406int MDPComp::prepare(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001407 int ret = 0;
Saurabh Shahaa236822013-04-24 18:07:26 -07001408 const int numLayers = ctx->listStats[mDpy].numAppLayers;
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001409 MDPVersion& mdpVersion = qdutils::MDPVersion::getInstance();
Ramkumar Radhakrishnanc5893f12013-06-06 19:43:53 -07001410
Raj Kamal9ed3d6b2014-02-07 16:15:17 +05301411 //Do not cache the information for next draw cycle.
1412 if(numLayers > MAX_NUM_APP_LAYERS or (!numLayers)) {
1413 ALOGI("%s: Unsupported layer count for mdp composition",
1414 __FUNCTION__);
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001415 mCachedFrame.reset();
1416 return -1;
1417 }
1418
Saurabh Shahb39f8152013-08-22 10:21:44 -07001419 //reset old data
1420 mCurrentFrame.reset(numLayers);
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001421 memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
1422 mCurrentFrame.dropCount = 0;
Prabhanjan Kandula088bd892013-07-02 23:47:13 +05301423
Ramkumar Radhakrishnana70981a2013-08-28 11:33:53 -07001424 // Detect the start of animation and fall back to GPU only once to cache
1425    // all the layers in FB and display FB content until animation completes.
1426 if(ctx->listStats[mDpy].isDisplayAnimating) {
1427 mCurrentFrame.needsRedraw = false;
1428 if(ctx->mAnimationState[mDpy] == ANIMATION_STOPPED) {
1429 mCurrentFrame.needsRedraw = true;
1430 ctx->mAnimationState[mDpy] = ANIMATION_STARTED;
1431 }
1432 setMDPCompLayerFlags(ctx, list);
1433 mCachedFrame.updateCounts(mCurrentFrame);
1434 ret = -1;
1435 return ret;
1436 } else {
1437 ctx->mAnimationState[mDpy] = ANIMATION_STOPPED;
1438 }
1439
Saurabh Shahb39f8152013-08-22 10:21:44 -07001440 //Hard conditions, if not met, cannot do MDP comp
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001441 if(isFrameDoable(ctx)) {
1442 generateROI(ctx, list);
Saurabh Shahb39f8152013-08-22 10:21:44 -07001443
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001444 //Convert from kbps to gbps
1445 sMaxBw = mdpVersion.getHighBw() / 1000000.0;
1446 if (ctx->mExtDisplay->isConnected() ||
1447 ctx->mMDP.panel != MIPI_CMD_PANEL) {
1448 sMaxBw = mdpVersion.getLowBw() / 1000000.0;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001449 }
1450
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001451 if(tryFullFrame(ctx, list) || tryVideoOnly(ctx, list)) {
1452 setMDPCompLayerFlags(ctx, list);
1453 } else {
1454 reset(ctx);
1455 memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
1456 mCurrentFrame.dropCount = 0;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001457 ret = -1;
Saurabh Shahb39f8152013-08-22 10:21:44 -07001458 }
1459 } else {
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001460 ALOGD_IF( isDebug(),"%s: MDP Comp not possible for this frame",
1461 __FUNCTION__);
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001462 ret = -1;
Saurabh Shahb39f8152013-08-22 10:21:44 -07001463 }
Saurabh Shahb39f8152013-08-22 10:21:44 -07001464
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001465 if(isDebug()) {
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001466 ALOGD("GEOMETRY change: %d",
1467 (list->flags & HWC_GEOMETRY_CHANGED));
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001468 android::String8 sDump("");
1469 dump(sDump);
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001470 ALOGD("%s",sDump.string());
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001471 }
1472
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001473 mCachedFrame.cacheAll(list);
1474 mCachedFrame.updateCounts(mCurrentFrame);
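    // Convert this frame's estimated read volume (GB) into a claimed
    // bandwidth figure (GBps) using the panel refresh rate derived from the
    // vsync period (ns), and accumulate it into sBwClaimed.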
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001475 double panelRefRate =
1476 1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;
1477 sBwClaimed += calcMDPBytesRead(ctx, list) * panelRefRate;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001478 return ret;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001479}
1480
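/*
 * Reserves two VG pipes on the default mixer for a source-split 4kx2k YUV
 * layer (left and right halves); returns false if either pipe is unavailable.
 */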
Arun Kumar K.R2aa44c62014-01-21 23:08:28 -08001481bool MDPComp::allocSplitVGPipesfor4k2k(hwc_context_t *ctx, int index) {
radhakrishnac9a67412013-09-25 17:40:42 +05301482
1483 bool bRet = true;
radhakrishnac9a67412013-09-25 17:40:42 +05301484 int mdpIndex = mCurrentFrame.layerToMDP[index];
1485 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
1486 info.pipeInfo = new MdpYUVPipeInfo;
1487 info.rot = NULL;
1488 MdpYUVPipeInfo& pipe_info = *(MdpYUVPipeInfo*)info.pipeInfo;
1489 ePipeType type = MDPCOMP_OV_VG;
1490
1491 pipe_info.lIndex = ovutils::OV_INVALID;
1492 pipe_info.rIndex = ovutils::OV_INVALID;
1493
1494 pipe_info.lIndex = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
1495 if(pipe_info.lIndex == ovutils::OV_INVALID){
1496 bRet = false;
1497 ALOGD_IF(isDebug(),"%s: allocating first VG pipe failed",
1498 __FUNCTION__);
1499 }
1500 pipe_info.rIndex = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
1501 if(pipe_info.rIndex == ovutils::OV_INVALID){
1502 bRet = false;
1503 ALOGD_IF(isDebug(),"%s: allocating second VG pipe failed",
1504 __FUNCTION__);
1505 }
1506 return bRet;
1507}
Arun Kumar K.R2aa44c62014-01-21 23:08:28 -08001508//=============MDPCompNonSplit==================================================
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001509
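/*
 * Source-split accounting for non-split displays: every 4kx2k YUV layer
 * drives two VG pipes, so the MDP count grows by the 4k2k layer count and
 * the FB z-order moves up by one for each such layer beneath it.
 */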
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001510void MDPCompNonSplit::adjustForSourceSplit(hwc_context_t *ctx,
Arun Kumar K.R2aa44c62014-01-21 23:08:28 -08001511 hwc_display_contents_1_t*) {
radhakrishnac9a67412013-09-25 17:40:42 +05301512    //Since we split a 4kx2k YUV layer and program it to 2 VG pipes
1513    //(if available), increase the mdp count accordingly
1514 mCurrentFrame.mdpCount += ctx->listStats[mDpy].yuv4k2kCount;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001515
1516    //If a 4k2k YUV layer can be split and the FB z-order
1517    //lies above it, bump the FB z-order by 1, since
1518    //splitting the layer consumes an extra z-order
1519    //for its right half
1520 if(mCurrentFrame.fbZ >= 0) {
1521 int n4k2kYuvCount = ctx->listStats[mDpy].yuv4k2kCount;
1522 for(int index = 0; index < n4k2kYuvCount; index++){
1523 int n4k2kYuvIndex =
1524 ctx->listStats[mDpy].yuv4k2kIndices[index];
1525 if(mCurrentFrame.fbZ > n4k2kYuvIndex){
1526 mCurrentFrame.fbZ += 1;
1527 }
1528 }
1529 }
radhakrishnac9a67412013-09-25 17:40:42 +05301530}
1531
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001532/*
1533 * Configures pipe(s) for MDP composition
1534 */
Saurabh Shah88e4d272013-09-03 13:31:29 -07001535int MDPCompNonSplit::configure(hwc_context_t *ctx, hwc_layer_1_t *layer,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001536 PipeLayerPair& PipeLayerPair) {
Saurabh Shah88e4d272013-09-03 13:31:29 -07001537 MdpPipeInfoNonSplit& mdp_info =
1538 *(static_cast<MdpPipeInfoNonSplit*>(PipeLayerPair.pipeInfo));
Saurabh Shahacf10202013-02-26 10:15:15 -08001539 eMdpFlags mdpFlags = OV_MDP_BACKEND_COMPOSITION;
1540 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1541 eIsFg isFg = IS_FG_OFF;
1542 eDest dest = mdp_info.index;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001543
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001544 ALOGD_IF(isDebug(),"%s: configuring: layer: %p z_order: %d dest_pipe: %d",
1545 __FUNCTION__, layer, zOrder, dest);
1546
Saurabh Shah88e4d272013-09-03 13:31:29 -07001547 return configureNonSplit(ctx, layer, mDpy, mdpFlags, zOrder, isFg, dest,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001548 &PipeLayerPair.rot);
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001549}
1550
Saurabh Shah88e4d272013-09-03 13:31:29 -07001551bool MDPCompNonSplit::allocLayerPipes(hwc_context_t *ctx,
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001552 hwc_display_contents_1_t* list) {
1553 for(int index = 0; index < mCurrentFrame.layerCount; index++) {
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001554
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001555 if(mCurrentFrame.isFBComposed[index]) continue;
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001556
Jeykumar Sankarancf537002013-01-21 21:19:15 -08001557 hwc_layer_1_t* layer = &list->hwLayers[index];
1558 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301559 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
Arun Kumar K.R2aa44c62014-01-21 23:08:28 -08001560 if(allocSplitVGPipesfor4k2k(ctx, index)){
radhakrishnac9a67412013-09-25 17:40:42 +05301561 continue;
1562 }
1563 }
1564
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001565 int mdpIndex = mCurrentFrame.layerToMDP[index];
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001566 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
Saurabh Shah88e4d272013-09-03 13:31:29 -07001567 info.pipeInfo = new MdpPipeInfoNonSplit;
Saurabh Shahacf10202013-02-26 10:15:15 -08001568 info.rot = NULL;
Saurabh Shah88e4d272013-09-03 13:31:29 -07001569 MdpPipeInfoNonSplit& pipe_info = *(MdpPipeInfoNonSplit*)info.pipeInfo;
Jeykumar Sankarana37fdbf2013-03-06 18:59:28 -08001570 ePipeType type = MDPCOMP_OV_ANY;
1571
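        // Pipe type selection for the chain below: YUV buffers need a VG
        // pipe; on 8x26 with a primary panel wider than 1024, scaling layers
        // are pinned to an RGB pipe; on MDSS (>= V5), layers that need no
        // scaling can take a DMA pipe as long as DMA is not in block mode;
        // otherwise any pipe type may be used.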
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001572 if(isYuvBuffer(hnd)) {
1573 type = MDPCOMP_OV_VG;
Prabhanjan Kandula47191dc2014-01-22 23:01:45 +05301574 } else if(qdutils::MDPVersion::getInstance().is8x26() &&
1575 (ctx->dpyAttr[HWC_DISPLAY_PRIMARY].xres > 1024)) {
1576 if(qhwc::needsScaling(layer))
1577 type = MDPCOMP_OV_RGB;
Prabhanjan Kandula21918db2013-11-26 15:51:58 +05301578 } else if(!qhwc::needsScaling(layer)
Saurabh Shah85234ec2013-04-12 17:09:00 -07001579 && Overlay::getDMAMode() != Overlay::DMA_BLOCK_MODE
1580 && ctx->mMDP.version >= qdutils::MDSS_V5) {
Jeykumar Sankarana37fdbf2013-03-06 18:59:28 -08001581 type = MDPCOMP_OV_DMA;
1582 }
1583
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001584 pipe_info.index = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001585 if(pipe_info.index == ovutils::OV_INVALID) {
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001586 ALOGD_IF(isDebug(), "%s: Unable to get pipe type = %d",
1587 __FUNCTION__, (int) type);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001588 return false;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001589 }
1590 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001591 return true;
1592}
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001593
radhakrishnac9a67412013-09-25 17:40:42 +05301594int MDPCompNonSplit::configure4k2kYuv(hwc_context_t *ctx, hwc_layer_1_t *layer,
1595 PipeLayerPair& PipeLayerPair) {
1596 MdpYUVPipeInfo& mdp_info =
1597 *(static_cast<MdpYUVPipeInfo*>(PipeLayerPair.pipeInfo));
1598 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1599 eIsFg isFg = IS_FG_OFF;
1600 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1601 eDest lDest = mdp_info.lIndex;
1602 eDest rDest = mdp_info.rIndex;
1603
1604 return configureSourceSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg,
1605 lDest, rDest, &PipeLayerPair.rot);
1606}
1607
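/*
 * Queues buffers for all MDP-composed layers. Color-fill layers have no
 * handle and are skipped, rotator output replaces the layer fd/offset when
 * rotation is in use, and a source-split 4k2k YUV layer queues its buffer to
 * both of its VG pipes.
 */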
Saurabh Shah88e4d272013-09-03 13:31:29 -07001608bool MDPCompNonSplit::draw(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001609
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001610 if(!isEnabled()) {
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001611 ALOGD_IF(isDebug(),"%s: MDP Comp not configured", __FUNCTION__);
1612 return true;
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -08001613 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001614
1615 if(!ctx || !list) {
1616        ALOGE("%s: invalid context or list",__FUNCTION__);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001617 return false;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001618 }
1619
Prabhanjan Kandula08222fc2013-07-10 17:20:59 +05301620 if(ctx->listStats[mDpy].numAppLayers > MAX_NUM_APP_LAYERS) {
1621 ALOGD_IF(isDebug(),"%s: Exceeding max layer count", __FUNCTION__);
1622 return true;
1623 }
1624
Ramkumar Radhakrishnan92abb4f2014-02-06 21:31:29 -08001625 // Set the Handle timeout to true for MDP or MIXED composition.
1626 if(idleInvalidator && !sIdleFallBack && mCurrentFrame.mdpCount) {
1627 sHandleTimeout = true;
1628 }
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001629
1630 overlay::Overlay& ov = *ctx->mOverlay;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001631 LayerProp *layerProp = ctx->layerProp[mDpy];
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001632
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001633 int numHwLayers = ctx->listStats[mDpy].numAppLayers;
1634 for(int i = 0; i < numHwLayers && mCurrentFrame.mdpCount; i++ )
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001635 {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001636 if(mCurrentFrame.isFBComposed[i]) continue;
1637
Naseer Ahmed5b6708a2012-08-02 13:46:08 -07001638 hwc_layer_1_t *layer = &list->hwLayers[i];
Saurabh Shahacf10202013-02-26 10:15:15 -08001639 private_handle_t *hnd = (private_handle_t *)layer->handle;
1640 if(!hnd) {
Sushil Chauhan897a9c32013-07-18 11:09:55 -07001641 if (!(layer->flags & HWC_COLOR_FILL)) {
1642 ALOGE("%s handle null", __FUNCTION__);
1643 return false;
1644 }
1645 // No PLAY for Color layer
1646 layerProp[i].mFlags &= ~HWC_MDPCOMP;
1647 continue;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001648 }
1649
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001650 int mdpIndex = mCurrentFrame.layerToMDP[i];
1651
radhakrishnac9a67412013-09-25 17:40:42 +05301652 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit)
1653 {
1654 MdpYUVPipeInfo& pipe_info =
1655 *(MdpYUVPipeInfo*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
1656 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
1657 ovutils::eDest indexL = pipe_info.lIndex;
1658 ovutils::eDest indexR = pipe_info.rIndex;
1659 int fd = hnd->fd;
1660 uint32_t offset = hnd->offset;
1661 if(rot) {
1662 rot->queueBuffer(fd, offset);
1663 fd = rot->getDstMemId();
1664 offset = rot->getDstOffset();
1665 }
1666 if(indexL != ovutils::OV_INVALID) {
1667 ovutils::eDest destL = (ovutils::eDest)indexL;
1668 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1669 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
1670 if (!ov.queueBuffer(fd, offset, destL)) {
1671 ALOGE("%s: queueBuffer failed for display:%d",
1672 __FUNCTION__, mDpy);
1673 return false;
1674 }
1675 }
1676
1677 if(indexR != ovutils::OV_INVALID) {
1678 ovutils::eDest destR = (ovutils::eDest)indexR;
1679 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1680 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
1681 if (!ov.queueBuffer(fd, offset, destR)) {
1682 ALOGE("%s: queueBuffer failed for display:%d",
1683 __FUNCTION__, mDpy);
1684 return false;
1685 }
1686 }
1687 }
1688 else{
1689 MdpPipeInfoNonSplit& pipe_info =
Saurabh Shah88e4d272013-09-03 13:31:29 -07001690 *(MdpPipeInfoNonSplit*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
radhakrishnac9a67412013-09-25 17:40:42 +05301691 ovutils::eDest dest = pipe_info.index;
1692 if(dest == ovutils::OV_INVALID) {
1693 ALOGE("%s: Invalid pipe index (%d)", __FUNCTION__, dest);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001694 return false;
radhakrishnac9a67412013-09-25 17:40:42 +05301695 }
Saurabh Shahacf10202013-02-26 10:15:15 -08001696
radhakrishnac9a67412013-09-25 17:40:42 +05301697 if(!(layerProp[i].mFlags & HWC_MDPCOMP)) {
1698 continue;
1699 }
1700
1701 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1702 using pipe: %d", __FUNCTION__, layer,
1703 hnd, dest );
1704
1705 int fd = hnd->fd;
1706 uint32_t offset = hnd->offset;
1707
1708 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
1709 if(rot) {
1710 if(!rot->queueBuffer(fd, offset))
1711 return false;
1712 fd = rot->getDstMemId();
1713 offset = rot->getDstOffset();
1714 }
1715
1716 if (!ov.queueBuffer(fd, offset, dest)) {
1717 ALOGE("%s: queueBuffer failed for display:%d ",
1718 __FUNCTION__, mDpy);
1719 return false;
1720 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001721 }
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001722
1723 layerProp[i].mFlags &= ~HWC_MDPCOMP;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001724 }
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001725 return true;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001726}
1727
Saurabh Shah88e4d272013-09-03 13:31:29 -07001728//=============MDPCompSplit===================================================
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001729
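/*
 * On split displays a 4kx2k YUV layer is source-split only when it lies
 * entirely within one mixer's half; such a layer takes an extra pipe, and
 * the FB z-order moves up by one for each 4k2k layer beneath it.
 */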
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001730void MDPCompSplit::adjustForSourceSplit(hwc_context_t *ctx,
radhakrishnac9a67412013-09-25 17:40:42 +05301731 hwc_display_contents_1_t* list){
1732    //If a 4kx2k YUV layer lies entirely within either the left half
1733    //or the right half, try splitting it to avoid decimation
1734 int n4k2kYuvCount = ctx->listStats[mDpy].yuv4k2kCount;
1735 const int lSplit = getLeftSplit(ctx, mDpy);
1736 for(int index = 0; index < n4k2kYuvCount; index++){
1737 int n4k2kYuvIndex = ctx->listStats[mDpy].yuv4k2kIndices[index];
1738 hwc_layer_1_t* layer = &list->hwLayers[n4k2kYuvIndex];
1739 hwc_rect_t dst = layer->displayFrame;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001740 if((dst.left > lSplit) || (dst.right < lSplit)) {
radhakrishnac9a67412013-09-25 17:40:42 +05301741 mCurrentFrame.mdpCount += 1;
1742 }
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001743 if(mCurrentFrame.fbZ > n4k2kYuvIndex){
1744 mCurrentFrame.fbZ += 1;
1745 }
radhakrishnac9a67412013-09-25 17:40:42 +05301746 }
1747}
1748
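/*
 * Allocates one pipe per mixer the layer spans: a pipe on the left mixer if
 * the layer starts left of the split point, and one on the right mixer if it
 * extends past it.
 */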
Saurabh Shah88e4d272013-09-03 13:31:29 -07001749bool MDPCompSplit::acquireMDPPipes(hwc_context_t *ctx, hwc_layer_1_t* layer,
1750 MdpPipeInfoSplit& pipe_info,
Saurabh Shah67a38c32013-06-10 16:23:15 -07001751 ePipeType type) {
Saurabh Shah07a8ca82013-08-06 18:45:42 -07001752 const int lSplit = getLeftSplit(ctx, mDpy);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001753
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001754 hwc_rect_t dst = layer->displayFrame;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001755 pipe_info.lIndex = ovutils::OV_INVALID;
1756 pipe_info.rIndex = ovutils::OV_INVALID;
1757
1758 if (dst.left < lSplit) {
1759 pipe_info.lIndex = getMdpPipe(ctx, type, Overlay::MIXER_LEFT);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001760 if(pipe_info.lIndex == ovutils::OV_INVALID)
1761 return false;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001762 }
1763
1764 if(dst.right > lSplit) {
1765 pipe_info.rIndex = getMdpPipe(ctx, type, Overlay::MIXER_RIGHT);
1766 if(pipe_info.rIndex == ovutils::OV_INVALID)
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001767 return false;
1768 }
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001769
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001770 return true;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001771}
1772
Saurabh Shah88e4d272013-09-03 13:31:29 -07001773bool MDPCompSplit::allocLayerPipes(hwc_context_t *ctx,
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001774 hwc_display_contents_1_t* list) {
1775 for(int index = 0 ; index < mCurrentFrame.layerCount; index++) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001776
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001777 if(mCurrentFrame.isFBComposed[index]) continue;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001778
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001779 hwc_layer_1_t* layer = &list->hwLayers[index];
1780 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301781 hwc_rect_t dst = layer->displayFrame;
1782 const int lSplit = getLeftSplit(ctx, mDpy);
1783 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1784 if((dst.left > lSplit)||(dst.right < lSplit)){
Arun Kumar K.R2aa44c62014-01-21 23:08:28 -08001785 if(allocSplitVGPipesfor4k2k(ctx, index)){
radhakrishnac9a67412013-09-25 17:40:42 +05301786 continue;
1787 }
1788 }
1789 }
Saurabh Shah0d65dbe2013-06-06 18:33:16 -07001790 int mdpIndex = mCurrentFrame.layerToMDP[index];
1791 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
Saurabh Shah88e4d272013-09-03 13:31:29 -07001792 info.pipeInfo = new MdpPipeInfoSplit;
Saurabh Shah9e3adb22013-03-26 11:16:27 -07001793 info.rot = NULL;
Saurabh Shah88e4d272013-09-03 13:31:29 -07001794 MdpPipeInfoSplit& pipe_info = *(MdpPipeInfoSplit*)info.pipeInfo;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001795 ePipeType type = MDPCOMP_OV_ANY;
1796
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001797 if(isYuvBuffer(hnd)) {
1798 type = MDPCOMP_OV_VG;
Sushil Chauhan15a2ea62013-09-04 18:28:36 -07001799 } else if(!qhwc::needsScalingWithSplit(ctx, layer, mDpy)
Saurabh Shah85234ec2013-04-12 17:09:00 -07001800 && Overlay::getDMAMode() != Overlay::DMA_BLOCK_MODE
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001801 && ctx->mMDP.version >= qdutils::MDSS_V5) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001802 type = MDPCOMP_OV_DMA;
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001803 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001804
1805 if(!acquireMDPPipes(ctx, layer, pipe_info, type)) {
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001806 ALOGD_IF(isDebug(), "%s: Unable to get pipe for type = %d",
1807 __FUNCTION__, (int) type);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001808 return false;
1809 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001810 }
1811 return true;
1812}
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001813
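/*
 * For a 4kx2k YUV layer wholly within one half of a split display, programs
 * its two source-split VG pipes; a layer that crosses the split point falls
 * through to the regular split configure path, which already uses one pipe
 * per mixer.
 */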
radhakrishnac9a67412013-09-25 17:40:42 +05301814int MDPCompSplit::configure4k2kYuv(hwc_context_t *ctx, hwc_layer_1_t *layer,
1815 PipeLayerPair& PipeLayerPair) {
1816 const int lSplit = getLeftSplit(ctx, mDpy);
1817 hwc_rect_t dst = layer->displayFrame;
1818 if((dst.left > lSplit)||(dst.right < lSplit)){
1819 MdpYUVPipeInfo& mdp_info =
1820 *(static_cast<MdpYUVPipeInfo*>(PipeLayerPair.pipeInfo));
1821 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1822 eIsFg isFg = IS_FG_OFF;
1823 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1824 eDest lDest = mdp_info.lIndex;
1825 eDest rDest = mdp_info.rIndex;
1826
1827 return configureSourceSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg,
1828 lDest, rDest, &PipeLayerPair.rot);
1829 }
1830 else{
1831 return configure(ctx, layer, PipeLayerPair);
1832 }
1833}
1834
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001835/*
1836 * Configures pipe(s) for MDP composition
1837 */
Saurabh Shah88e4d272013-09-03 13:31:29 -07001838int MDPCompSplit::configure(hwc_context_t *ctx, hwc_layer_1_t *layer,
Saurabh Shah67a38c32013-06-10 16:23:15 -07001839 PipeLayerPair& PipeLayerPair) {
Saurabh Shah88e4d272013-09-03 13:31:29 -07001840 MdpPipeInfoSplit& mdp_info =
1841 *(static_cast<MdpPipeInfoSplit*>(PipeLayerPair.pipeInfo));
Saurabh Shahacf10202013-02-26 10:15:15 -08001842 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1843 eIsFg isFg = IS_FG_OFF;
1844 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1845 eDest lDest = mdp_info.lIndex;
1846 eDest rDest = mdp_info.rIndex;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001847
1848 ALOGD_IF(isDebug(),"%s: configuring: layer: %p z_order: %d dest_pipeL: %d"
1849 "dest_pipeR: %d",__FUNCTION__, layer, zOrder, lDest, rDest);
1850
Saurabh Shah88e4d272013-09-03 13:31:29 -07001851 return configureSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg, lDest,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001852 rDest, &PipeLayerPair.rot);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001853}
1854
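/*
 * Split-display draw: queues each MDP layer's buffer to its left and/or
 * right mixer pipe. The assertive display (AD) output, when active, and the
 * rotator output, when present, substitute the fd/offset that gets queued.
 */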
Saurabh Shah88e4d272013-09-03 13:31:29 -07001855bool MDPCompSplit::draw(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001856
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001857 if(!isEnabled()) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001858 ALOGD_IF(isDebug(),"%s: MDP Comp not configured", __FUNCTION__);
1859 return true;
1860 }
1861
1862 if(!ctx || !list) {
1863        ALOGE("%s: invalid context or list",__FUNCTION__);
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001864 return false;
1865 }
1866
Prabhanjan Kandula08222fc2013-07-10 17:20:59 +05301867 if(ctx->listStats[mDpy].numAppLayers > MAX_NUM_APP_LAYERS) {
1868 ALOGD_IF(isDebug(),"%s: Exceeding max layer count", __FUNCTION__);
1869 return true;
1870 }
1871
Ramkumar Radhakrishnan92abb4f2014-02-06 21:31:29 -08001872 // Set the Handle timeout to true for MDP or MIXED composition.
1873 if(idleInvalidator && !sIdleFallBack && mCurrentFrame.mdpCount) {
1874 sHandleTimeout = true;
1875 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001876
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001877 overlay::Overlay& ov = *ctx->mOverlay;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001878 LayerProp *layerProp = ctx->layerProp[mDpy];
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001879
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001880 int numHwLayers = ctx->listStats[mDpy].numAppLayers;
1881 for(int i = 0; i < numHwLayers && mCurrentFrame.mdpCount; i++ )
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001882 {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001883 if(mCurrentFrame.isFBComposed[i]) continue;
1884
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001885 hwc_layer_1_t *layer = &list->hwLayers[i];
Saurabh Shahacf10202013-02-26 10:15:15 -08001886 private_handle_t *hnd = (private_handle_t *)layer->handle;
1887 if(!hnd) {
1888 ALOGE("%s handle null", __FUNCTION__);
1889 return false;
1890 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001891
1892 if(!(layerProp[i].mFlags & HWC_MDPCOMP)) {
1893 continue;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001894 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001895
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001896 int mdpIndex = mCurrentFrame.layerToMDP[i];
1897
radhakrishnac9a67412013-09-25 17:40:42 +05301898 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit)
1899 {
1900 MdpYUVPipeInfo& pipe_info =
1901 *(MdpYUVPipeInfo*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
1902 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
1903 ovutils::eDest indexL = pipe_info.lIndex;
1904 ovutils::eDest indexR = pipe_info.rIndex;
1905 int fd = hnd->fd;
1906 uint32_t offset = hnd->offset;
1907 if(rot) {
1908 rot->queueBuffer(fd, offset);
1909 fd = rot->getDstMemId();
1910 offset = rot->getDstOffset();
1911 }
1912 if(indexL != ovutils::OV_INVALID) {
1913 ovutils::eDest destL = (ovutils::eDest)indexL;
1914 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1915 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
1916 if (!ov.queueBuffer(fd, offset, destL)) {
1917 ALOGE("%s: queueBuffer failed for display:%d",
1918 __FUNCTION__, mDpy);
1919 return false;
1920 }
1921 }
Saurabh Shahacf10202013-02-26 10:15:15 -08001922
radhakrishnac9a67412013-09-25 17:40:42 +05301923 if(indexR != ovutils::OV_INVALID) {
1924 ovutils::eDest destR = (ovutils::eDest)indexR;
1925 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1926 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
1927 if (!ov.queueBuffer(fd, offset, destR)) {
1928 ALOGE("%s: queueBuffer failed for display:%d",
1929 __FUNCTION__, mDpy);
1930 return false;
1931 }
Saurabh Shaha9da08f2013-07-03 13:27:53 -07001932 }
1933 }
radhakrishnac9a67412013-09-25 17:40:42 +05301934 else{
1935 MdpPipeInfoSplit& pipe_info =
1936 *(MdpPipeInfoSplit*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
1937 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
Saurabh Shaha9da08f2013-07-03 13:27:53 -07001938
radhakrishnac9a67412013-09-25 17:40:42 +05301939 ovutils::eDest indexL = pipe_info.lIndex;
1940 ovutils::eDest indexR = pipe_info.rIndex;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001941
radhakrishnac9a67412013-09-25 17:40:42 +05301942 int fd = hnd->fd;
1943 int offset = hnd->offset;
1944
1945 if(ctx->mAD->isModeOn()) {
1946 if(ctx->mAD->draw(ctx, fd, offset)) {
Arun Kumar K.R2aa44c62014-01-21 23:08:28 -08001947 fd = ctx->mAD->getDstFd();
1948 offset = ctx->mAD->getDstOffset();
radhakrishnac9a67412013-09-25 17:40:42 +05301949 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001950 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001951
radhakrishnac9a67412013-09-25 17:40:42 +05301952 if(rot) {
1953 rot->queueBuffer(fd, offset);
1954 fd = rot->getDstMemId();
1955 offset = rot->getDstOffset();
1956 }
1957
1958 //************* play left mixer **********
1959 if(indexL != ovutils::OV_INVALID) {
1960 ovutils::eDest destL = (ovutils::eDest)indexL;
1961 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1962 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
1963 if (!ov.queueBuffer(fd, offset, destL)) {
1964 ALOGE("%s: queueBuffer failed for left mixer",
1965 __FUNCTION__);
1966 return false;
1967 }
1968 }
1969
1970 //************* play right mixer **********
1971 if(indexR != ovutils::OV_INVALID) {
1972 ovutils::eDest destR = (ovutils::eDest)indexR;
1973 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1974 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
1975 if (!ov.queueBuffer(fd, offset, destR)) {
1976 ALOGE("%s: queueBuffer failed for right mixer",
1977 __FUNCTION__);
1978 return false;
1979 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001980 }
1981 }
Saurabh Shahacf10202013-02-26 10:15:15 -08001982
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001983 layerProp[i].mFlags &= ~HWC_MDPCOMP;
1984 }
Saurabh Shahacf10202013-02-26 10:15:15 -08001985
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001986 return true;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001987}
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001988}; //namespace
1989