/*
 * Copyright (C) 2012-2013, The Linux Foundation. All rights reserved.
 * Not a Contribution, Apache license notifications and license are retained
 * for attribution purposes only.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <math.h>
#include "hwc_mdpcomp.h"
#include <sys/ioctl.h>
#include "external.h"
#include "virtual.h"
#include "qdMetaData.h"
#include "mdp_version.h"
#include "hwc_fbupdate.h"
#include "hwc_ad.h"
#include <overlayRotator.h>

using namespace overlay;
using namespace qdutils;
using namespace overlay::utils;
namespace ovutils = overlay::utils;

namespace qhwc {

//==============MDPComp========================================================

IdleInvalidator *MDPComp::idleInvalidator = NULL;
bool MDPComp::sIdleFallBack = false;
bool MDPComp::sDebugLogs = false;
bool MDPComp::sEnabled = false;
bool MDPComp::sEnableMixedMode = true;
bool MDPComp::sEnablePartialFrameUpdate = false;
int MDPComp::sMaxPipesPerMixer = MAX_PIPES_PER_MIXER;
double MDPComp::sMaxBw = 0.0;
double MDPComp::sBwClaimed = 0.0;
bool MDPComp::sEnable4k2kYUVSplit = false;

MDPComp* MDPComp::getObject(hwc_context_t *ctx, const int& dpy) {
    if(isDisplaySplit(ctx, dpy)) {
        return new MDPCompSplit(dpy);
    }
    return new MDPCompNonSplit(dpy);
}

MDPComp::MDPComp(int dpy):mDpy(dpy){};

void MDPComp::dump(android::String8& buf)
{
    if(mCurrentFrame.layerCount > MAX_NUM_APP_LAYERS)
        return;

    dumpsys_log(buf,"HWC Map for Dpy: %s \n",
                (mDpy == 0) ? "\"PRIMARY\"" :
                (mDpy == 1) ? "\"EXTERNAL\"" : "\"VIRTUAL\"");
    dumpsys_log(buf,"CURR_FRAME: layerCount:%2d mdpCount:%2d "
                "fbCount:%2d \n", mCurrentFrame.layerCount,
                mCurrentFrame.mdpCount, mCurrentFrame.fbCount);
    dumpsys_log(buf,"needsFBRedraw:%3s pipesUsed:%2d MaxPipesPerMixer: %d \n",
                (mCurrentFrame.needsRedraw? "YES" : "NO"),
                mCurrentFrame.mdpCount, sMaxPipesPerMixer);
    dumpsys_log(buf," --------------------------------------------- \n");
    dumpsys_log(buf," listIdx | cached? | mdpIndex | comptype | Z \n");
    dumpsys_log(buf," --------------------------------------------- \n");
    for(int index = 0; index < mCurrentFrame.layerCount; index++ )
        dumpsys_log(buf," %7d | %7s | %8d | %9s | %2d \n",
                    index,
                    (mCurrentFrame.isFBComposed[index] ? "YES" : "NO"),
                    mCurrentFrame.layerToMDP[index],
                    (mCurrentFrame.isFBComposed[index] ?
                    (mCurrentFrame.drop[index] ? "DROP" :
                    (mCurrentFrame.needsRedraw ? "GLES" : "CACHE")) : "MDP"),
                    (mCurrentFrame.isFBComposed[index] ? mCurrentFrame.fbZ :
    mCurrentFrame.mdpToLayer[mCurrentFrame.layerToMDP[index]].pipeInfo->zOrder));
    dumpsys_log(buf,"\n");
}

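/* One-time initialization: reads the persist/debug properties that enable
 * MDP composition, mixed mode, partial frame update, the per-mixer pipe
 * limit and 4k2k YUV split, and arms the idle invalidator on video-mode
 * panels. For instance, setting persist.hwc.mdpcomp.enable to "1" or "true"
 * turns MDP composition on (illustrative; the full set of keys is read
 * below). */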
bool MDPComp::init(hwc_context_t *ctx) {

    if(!ctx) {
        ALOGE("%s: Invalid hwc context!!",__FUNCTION__);
        return false;
    }

    char property[PROPERTY_VALUE_MAX];

    sEnabled = false;
    if((property_get("persist.hwc.mdpcomp.enable", property, NULL) > 0) &&
       (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
        (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
        sEnabled = true;
    }

    sEnableMixedMode = true;
    if((property_get("debug.mdpcomp.mixedmode.disable", property, NULL) > 0) &&
       (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
        (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
        sEnableMixedMode = false;
    }

    if(property_get("debug.mdpcomp.logs", property, NULL) > 0) {
        if(atoi(property) != 0)
            sDebugLogs = true;
    }

    if(property_get("persist.hwc.partialupdate.enable", property, NULL) > 0) {
        if((atoi(property) != 0) && ctx->mMDP.panel == MIPI_CMD_PANEL &&
           qdutils::MDPVersion::getInstance().is8x74v2())
            sEnablePartialFrameUpdate = true;
    }
    ALOGE_IF(isDebug(), "%s: Partial Update applicable?: %d",__FUNCTION__,
             sEnablePartialFrameUpdate);

    sMaxPipesPerMixer = MAX_PIPES_PER_MIXER;
    if(property_get("debug.mdpcomp.maxpermixer", property, "-1") > 0) {
        int val = atoi(property);
        if(val >= 0)
            sMaxPipesPerMixer = min(val, MAX_PIPES_PER_MIXER);
    }

    if(ctx->mMDP.panel != MIPI_CMD_PANEL) {
        // Idle invalidation is not necessary on command mode panels
        long idle_timeout = DEFAULT_IDLE_TIME;
        if(property_get("debug.mdpcomp.idletime", property, NULL) > 0) {
            if(atoi(property) != 0)
                idle_timeout = atoi(property);
        }

        //create Idle Invalidator only when not disabled through property
        if(idle_timeout != -1)
            idleInvalidator = IdleInvalidator::getInstance();

        if(idleInvalidator == NULL) {
            ALOGE("%s: failed to instantiate idleInvalidator object",
                  __FUNCTION__);
        } else {
            idleInvalidator->init(timeout_handler, ctx, idle_timeout);
        }
    }

    if((property_get("debug.mdpcomp.4k2kSplit", property, "0") > 0) &&
       (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
        (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
        sEnable4k2kYUVSplit = true;
    }
    return true;
}

void MDPComp::reset(hwc_context_t *ctx) {
    const int numLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numLayers);
    ctx->mOverlay->clear(mDpy);
    ctx->mLayerRotMap[mDpy]->clear();
}

void MDPComp::timeout_handler(void *udata) {
    struct hwc_context_t* ctx = (struct hwc_context_t*)(udata);

    if(!ctx) {
        ALOGE("%s: received empty data in timer callback", __FUNCTION__);
        return;
    }

    if(!ctx->proc) {
        ALOGE("%s: HWC proc not registered", __FUNCTION__);
        return;
    }
    sIdleFallBack = true;
    /* Trigger SF to redraw the current frame */
    ctx->proc->invalidate(ctx->proc);
}

void MDPComp::setMDPCompLayerFlags(hwc_context_t *ctx,
                                   hwc_display_contents_1_t* list) {
    LayerProp *layerProp = ctx->layerProp[mDpy];

    for(int index = 0; index < ctx->listStats[mDpy].numAppLayers; index++) {
        hwc_layer_1_t* layer = &(list->hwLayers[index]);
        if(!mCurrentFrame.isFBComposed[index]) {
            layerProp[index].mFlags |= HWC_MDPCOMP;
            layer->compositionType = HWC_OVERLAY;
            layer->hints |= HWC_HINT_CLEAR_FB;
        } else {
            /* Drop the layer when its already present in FB OR when it lies
             * outside frame's ROI */
            if(!mCurrentFrame.needsRedraw || mCurrentFrame.drop[index]) {
                layer->compositionType = HWC_OVERLAY;
            }
        }
    }
}

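/* Marks the frame as needing a GLES redraw of the framebuffer target when
 * the composed layer set differs from the cached frame, the geometry has
 * changed, or skip layers are present. */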
void MDPComp::setRedraw(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    mCurrentFrame.needsRedraw = false;
    if(!mCachedFrame.isSameFrame(mCurrentFrame, list) ||
            (list->flags & HWC_GEOMETRY_CHANGED) ||
            isSkipPresent(ctx, mDpy)) {
        mCurrentFrame.needsRedraw = true;
    }
}

MDPComp::FrameInfo::FrameInfo() {
    reset(0);
}

void MDPComp::FrameInfo::reset(const int& numLayers) {
    for(int i = 0 ; i < MAX_PIPES_PER_MIXER && numLayers; i++ ) {
        if(mdpToLayer[i].pipeInfo) {
            delete mdpToLayer[i].pipeInfo;
            mdpToLayer[i].pipeInfo = NULL;
            //We dont own the rotator
            mdpToLayer[i].rot = NULL;
        }
    }

    memset(&mdpToLayer, 0, sizeof(mdpToLayer));
    memset(&layerToMDP, -1, sizeof(layerToMDP));
    memset(&isFBComposed, 1, sizeof(isFBComposed));

    layerCount = numLayers;
    fbCount = numLayers;
    mdpCount = 0;
    needsRedraw = true;
    fbZ = 0;
}

void MDPComp::FrameInfo::map() {
    // populate layer and MDP maps
    int mdpIdx = 0;
    for(int idx = 0; idx < layerCount; idx++) {
        if(!isFBComposed[idx]) {
            mdpToLayer[mdpIdx].listIndex = idx;
            layerToMDP[idx] = mdpIdx++;
        }
    }
}

MDPComp::LayerCache::LayerCache() {
    reset();
}

void MDPComp::LayerCache::reset() {
    memset(&hnd, 0, sizeof(hnd));
    memset(&isFBComposed, true, sizeof(isFBComposed));
    memset(&drop, false, sizeof(drop));
    layerCount = 0;
}

void MDPComp::LayerCache::cacheAll(hwc_display_contents_1_t* list) {
    const int numAppLayers = list->numHwLayers - 1;
    for(int i = 0; i < numAppLayers; i++) {
        hnd[i] = list->hwLayers[i].handle;
    }
}

void MDPComp::LayerCache::updateCounts(const FrameInfo& curFrame) {
    layerCount = curFrame.layerCount;
    memcpy(&isFBComposed, &curFrame.isFBComposed, sizeof(isFBComposed));
    memcpy(&drop, &curFrame.drop, sizeof(drop));
}

bool MDPComp::LayerCache::isSameFrame(const FrameInfo& curFrame,
                                      hwc_display_contents_1_t* list) {
    if(layerCount != curFrame.layerCount)
        return false;
    for(int i = 0; i < curFrame.layerCount; i++) {
        if((curFrame.isFBComposed[i] != isFBComposed[i]) ||
           (curFrame.drop[i] != drop[i])) {
            return false;
        }
        if(curFrame.isFBComposed[i] &&
           (hnd[i] != list->hwLayers[i].handle)){
            return false;
        }
    }
    return true;
}

bool MDPComp::isSupportedForMDPComp(hwc_context_t *ctx, hwc_layer_1_t* layer) {
    private_handle_t *hnd = (private_handle_t *)layer->handle;
    if((not isYuvBuffer(hnd) and has90Transform(layer)) or
        (not isValidDimension(ctx,layer))
        //More conditions here, SKIP, sRGB+Blend etc
        ) {
        return false;
    }
    return true;
}

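/* Validates layer dimensions against MDP pipe limits. As an illustrative
 * example of the downscale check below (numbers are hypothetical, not from
 * the source): a 1920x1080 crop displayed into a 480x270 frame gives
 * w_dscale = ceil(1920/480) = 4 and h_dscale = ceil(1080/270) = 4, which is
 * rejected unless the MDP version reports a maximum downscale of 4 or more
 * (or decimation support on MDSS). */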
bool MDPComp::isValidDimension(hwc_context_t *ctx, hwc_layer_1_t *layer) {
    const int dpy = HWC_DISPLAY_PRIMARY;
    private_handle_t *hnd = (private_handle_t *)layer->handle;

    if(!hnd) {
        if (layer->flags & HWC_COLOR_FILL) {
            // Color layer
            return true;
        }
        ALOGE("%s: layer handle is NULL", __FUNCTION__);
        return false;
    }

    //XXX: Investigate doing this with pixel phase on MDSS
    if(!isSecureBuffer(hnd) && isNonIntegralSourceCrop(layer->sourceCropf))
        return false;

    int hw_w = ctx->dpyAttr[mDpy].xres;
    int hw_h = ctx->dpyAttr[mDpy].yres;

    hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
    hwc_rect_t dst = layer->displayFrame;
    int crop_w = crop.right - crop.left;
    int crop_h = crop.bottom - crop.top;
    int dst_w = dst.right - dst.left;
    int dst_h = dst.bottom - dst.top;
    float w_dscale = ceilf((float)crop_w / (float)dst_w);
    float h_dscale = ceilf((float)crop_h / (float)dst_h);

    /* Workaround for MDP HW limitation in DSI command mode panels where
     * FPS will not go beyond 30 if buffers on RGB pipes are of width or height
     * less than 5 pixels.
     * There is also a HW limitation in MDP: the minimum block size is 2x2.
     * Fall back to GPU if height is less than 2.
     */
    if((crop_w < 5)||(crop_h < 5))
        return false;

    if((w_dscale > 1.0f) || (h_dscale > 1.0f)) {
        const uint32_t downscale =
            qdutils::MDPVersion::getInstance().getMaxMDPDownscale();
        if(ctx->mMDP.version >= qdutils::MDSS_V5) {
            /* Workaround for downscales larger than 4x.
             * Will be removed once decimator block is enabled for MDSS
             */
            if(!qdutils::MDPVersion::getInstance().supportsDecimation()) {
                if(crop_w > MAX_DISPLAY_DIM || w_dscale > downscale ||
                   h_dscale > downscale)
                    return false;
            } else {
                if(w_dscale > 64 || h_dscale > 64)
                    return false;
            }
        } else { //A-family
            if(w_dscale > downscale || h_dscale > downscale)
                return false;
        }
    }

    return true;
}

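/* Allocates an MDP pipe of the requested type from the overlay pool. DMA
 * requests fall through to the RGB pool and, unless an RGB pipe was
 * explicitly requested, further on to the VG pool, so the cheapest suitable
 * pipe is tried first. */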
ovutils::eDest MDPComp::getMdpPipe(hwc_context_t *ctx, ePipeType type,
        int mixer) {
    overlay::Overlay& ov = *ctx->mOverlay;
    ovutils::eDest mdp_pipe = ovutils::OV_INVALID;

    switch(type) {
    case MDPCOMP_OV_DMA:
        mdp_pipe = ov.nextPipe(ovutils::OV_MDP_PIPE_DMA, mDpy, mixer);
        if(mdp_pipe != ovutils::OV_INVALID) {
            return mdp_pipe;
        }
        // No DMA pipe available: fall through and try an RGB pipe
    case MDPCOMP_OV_ANY:
    case MDPCOMP_OV_RGB:
        mdp_pipe = ov.nextPipe(ovutils::OV_MDP_PIPE_RGB, mDpy, mixer);
        if(mdp_pipe != ovutils::OV_INVALID) {
            return mdp_pipe;
        }

        if(type == MDPCOMP_OV_RGB) {
            //Requested only for RGB pipe
            break;
        }
        // Fall through and try a VG pipe for ANY/DMA requests
    case MDPCOMP_OV_VG:
        return ov.nextPipe(ovutils::OV_MDP_PIPE_VG, mDpy, mixer);
    default:
        ALOGE("%s: Invalid pipe type",__FUNCTION__);
        return ovutils::OV_INVALID;
    };
    return ovutils::OV_INVALID;
}

bool MDPComp::isFrameDoable(hwc_context_t *ctx) {
    bool ret = true;
    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    if(!isEnabled()) {
        ALOGD_IF(isDebug(),"%s: MDP Comp. not enabled.", __FUNCTION__);
        ret = false;
    } else if(qdutils::MDPVersion::getInstance().is8x26() &&
            ctx->mVideoTransFlag && ctx->mVirtualDisplay->isConnected()) {
        //1 Padding round to shift pipes across mixers
        ALOGD_IF(isDebug(),"%s: MDP Comp. video transition padding round",
                __FUNCTION__);
        ret = false;
    } else if(ctx->dpyAttr[HWC_DISPLAY_EXTERNAL].isConfiguring ||
              ctx->dpyAttr[HWC_DISPLAY_VIRTUAL].isConfiguring) {
        ALOGD_IF( isDebug(),"%s: External Display connection is pending",
                  __FUNCTION__);
        ret = false;
    } else if(ctx->isPaddingRound) {
        ctx->isPaddingRound = false;
        ALOGD_IF(isDebug(), "%s: padding round",__FUNCTION__);
        ret = false;
    }
    return ret;
}

/*
 * 1) Identify layers that are not visible in the updating ROI and drop them
 * from composition.
 * 2) If there is a scaling layer which would need cropping against the
 * generated ROI, reset the ROI to full resolution.
 */
bool MDPComp::validateAndApplyROI(hwc_context_t *ctx,
                               hwc_display_contents_1_t* list, hwc_rect_t roi) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    if(!isValidRect(roi))
        return false;

    hwc_rect_t visibleRect = roi;

    for(int i = numAppLayers - 1; i >= 0; i--){

        if(!isValidRect(visibleRect)) {
            mCurrentFrame.drop[i] = true;
            mCurrentFrame.dropCount++;
        }

        const hwc_layer_1_t* layer = &list->hwLayers[i];

        hwc_rect_t dstRect = layer->displayFrame;
        hwc_rect_t srcRect = integerizeSourceCrop(layer->sourceCropf);
        int transform = layer->transform;

        hwc_rect_t res = getIntersection(visibleRect, dstRect);

        int res_w = res.right - res.left;
        int res_h = res.bottom - res.top;
        int dst_w = dstRect.right - dstRect.left;
        int dst_h = dstRect.bottom - dstRect.top;

        if(!isValidRect(res)) {
            mCurrentFrame.drop[i] = true;
            mCurrentFrame.dropCount++;
        } else {
            /* Reset frame ROI when any layer which needs scaling also needs ROI
             * cropping */
            if((res_w != dst_w || res_h != dst_h) &&
                    needsScaling(layer)) {
                ALOGI("%s: Resetting ROI due to scaling", __FUNCTION__);
                memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
                mCurrentFrame.dropCount = 0;
                return false;
            }
        }

        if (layer->blending == HWC_BLENDING_NONE)
            visibleRect = deductRect(visibleRect, res);
    }
    return true;
}

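/* Builds the frame ROI as the union of the display rects of all updating
 * (or YUV) layers. ROI generation is skipped for secondary and split
 * displays, skip-layer frames and geometry changes; if validation fails,
 * the ROI falls back to the full panel resolution. */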
void MDPComp::generateROI(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    if(!sEnablePartialFrameUpdate) {
        return;
    }

    if(mDpy || isDisplaySplit(ctx, mDpy)){
        ALOGE_IF(isDebug(), "%s: ROI not supported for"
                 "the (1) external / virtual display's (2) dual DSI displays",
                 __FUNCTION__);
        return;
    }

    if(isSkipPresent(ctx, mDpy))
        return;

    if(list->flags & HWC_GEOMETRY_CHANGED)
        return;

    struct hwc_rect roi = (struct hwc_rect){0, 0, 0, 0};
    for(int index = 0; index < numAppLayers; index++ ) {
        if ((mCachedFrame.hnd[index] != list->hwLayers[index].handle) ||
            isYuvBuffer((private_handle_t *)list->hwLayers[index].handle)) {
            hwc_rect_t dstRect = list->hwLayers[index].displayFrame;
            hwc_rect_t srcRect = integerizeSourceCrop(
                                        list->hwLayers[index].sourceCropf);
            int transform = list->hwLayers[index].transform;

            /* Intersect against display boundaries */
            roi = getUnion(roi, dstRect);
        }
    }

    if(!validateAndApplyROI(ctx, list, roi)){
        roi = (struct hwc_rect) {0, 0,
                (int)ctx->dpyAttr[mDpy].xres, (int)ctx->dpyAttr[mDpy].yres};
    }

    ctx->listStats[mDpy].roi.x = roi.left;
    ctx->listStats[mDpy].roi.y = roi.top;
    ctx->listStats[mDpy].roi.w = roi.right - roi.left;
    ctx->listStats[mDpy].roi.h = roi.bottom - roi.top;

    ALOGD_IF(isDebug(),"%s: generated ROI: [%d, %d, %d, %d]", __FUNCTION__,
            roi.left, roi.top, roi.right, roi.bottom);
}

/* Checks for conditions where all the layers marked for MDP comp cannot be
 * bypassed. On such conditions we try to bypass at least the YUV layers */
bool MDPComp::tryFullFrame(hwc_context_t *ctx,
                                hwc_display_contents_1_t* list){

    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    if(sIdleFallBack && !ctx->listStats[mDpy].secureUI) {
        ALOGD_IF(isDebug(), "%s: Idle fallback dpy %d",__FUNCTION__, mDpy);
        return false;
    }

    if(isSkipPresent(ctx, mDpy)) {
        ALOGD_IF(isDebug(),"%s: SKIP present: %d",
                __FUNCTION__,
                isSkipPresent(ctx, mDpy));
        return false;
    }

    // check for action safe flag and downscale mode which requires scaling.
    if(ctx->dpyAttr[mDpy].mActionSafePresent
            || ctx->dpyAttr[mDpy].mDownScaleMode) {
        ALOGD_IF(isDebug(), "%s: Scaling needed for this frame",__FUNCTION__);
        return false;
    }

    for(int i = 0; i < numAppLayers; ++i) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        private_handle_t *hnd = (private_handle_t *)layer->handle;

        if(isYuvBuffer(hnd) && has90Transform(layer)) {
            if(!canUseRotator(ctx, mDpy)) {
                ALOGD_IF(isDebug(), "%s: Can't use rotator for dpy %d",
                        __FUNCTION__, mDpy);
                return false;
            }
        }

        //For 8x26 with panel width>1k, if RGB layer needs HFLIP fail mdp comp
        // may not need it if Gfx pre-rotation can handle all flips & rotations
        if(qdutils::MDPVersion::getInstance().is8x26() &&
                                (ctx->dpyAttr[mDpy].xres > 1024) &&
                                (layer->transform & HWC_TRANSFORM_FLIP_H) &&
                                (!isYuvBuffer(hnd)))
            return false;
    }

    if(ctx->mAD->isDoable()) {
        return false;
    }

    //If all above hard conditions are met we can do full or partial MDP comp.
    bool ret = false;
    if(fullMDPComp(ctx, list)) {
        ret = true;
    } else if(partialMDPComp(ctx, list)) {
        ret = true;
    }

    return ret;
}

bool MDPComp::fullMDPComp(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
    //Will benefit presentation / secondary-only layer.
    if((mDpy > HWC_DISPLAY_PRIMARY) &&
            (list->numHwLayers - 1) > MAX_SEC_LAYERS) {
        ALOGD_IF(isDebug(), "%s: Exceeds max secondary pipes",__FUNCTION__);
        return false;
    }

    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    for(int i = 0; i < numAppLayers; i++) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        if(not isSupportedForMDPComp(ctx, layer)) {
            ALOGD_IF(isDebug(), "%s: Unsupported layer in list",__FUNCTION__);
            return false;
        }

        //For 8x26, if there is only one layer which needs scale for secondary
        //while no scale for primary display, DMA pipe is occupied by primary.
        //If need to fall back to GLES composition, virtual display lacks DMA
        //pipe and error is reported.
        if(qdutils::MDPVersion::getInstance().is8x26() &&
                mDpy >= HWC_DISPLAY_EXTERNAL &&
                qhwc::needsScaling(layer))
            return false;
    }

    mCurrentFrame.fbCount = 0;
    mCurrentFrame.fbZ = -1;
    memcpy(&mCurrentFrame.isFBComposed, &mCurrentFrame.drop,
           sizeof(mCurrentFrame.isFBComposed));
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount -
        mCurrentFrame.dropCount;

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

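/* Mixed (partial) MDP composition: some layers go to MDP pipes while the
 * rest are cached in the framebuffer target. On a geometry change the
 * load-based strategies are tried before the cache-based one; otherwise the
 * cache-based strategy is preferred. */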
bool MDPComp::partialMDPComp(hwc_context_t *ctx, hwc_display_contents_1_t* list)
{
    if(!sEnableMixedMode) {
        //Mixed mode is disabled. No need to even try caching.
        return false;
    }

    bool ret = false;
    if(list->flags & HWC_GEOMETRY_CHANGED) { //Try load based first
        ret = loadBasedCompPreferGPU(ctx, list) or
                loadBasedCompPreferMDP(ctx, list) or
                cacheBasedComp(ctx, list);
    } else {
        ret = cacheBasedComp(ctx, list) or
                loadBasedCompPreferGPU(ctx, list) or
                loadBasedCompPreferMDP(ctx, list);
    }

    return ret;
}

bool MDPComp::cacheBasedComp(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numAppLayers);
    updateLayerCache(ctx, list);

    //If an MDP marked layer is unsupported cannot do partial MDP Comp
    for(int i = 0; i < numAppLayers; i++) {
        if(!mCurrentFrame.isFBComposed[i]) {
            hwc_layer_1_t* layer = &list->hwLayers[i];
            if(not isSupportedForMDPComp(ctx, layer)) {
                ALOGD_IF(isDebug(), "%s: Unsupported layer in list",
                        __FUNCTION__);
                reset(ctx);
                return false;
            }
        }
    }

    updateYUV(ctx, list, false /*secure only*/);
    bool ret = markLayersForCaching(ctx, list); //sets up fbZ also
    if(!ret) {
        ALOGD_IF(isDebug(),"%s: batching failed, dpy %d",__FUNCTION__, mDpy);
        reset(ctx);
        return false;
    }

    int mdpCount = mCurrentFrame.mdpCount;

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    //Will benefit cases where a video has non-updating background.
    if((mDpy > HWC_DISPLAY_PRIMARY) and
            (mdpCount > MAX_SEC_LAYERS)) {
        ALOGD_IF(isDebug(), "%s: Exceeds max secondary pipes",__FUNCTION__);
        reset(ctx);
        return false;
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

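/* Load-based partial composition that favours the GPU: among all windows of
 * batchSize consecutive non-dropped layers, the one with the smallest pixel
 * count is sent to GLES, and every other non-dropped layer gets an MDP
 * pipe. */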
bool MDPComp::loadBasedCompPreferGPU(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    if(not isLoadBasedCompDoable(ctx, list)) {
        return false;
    }

    int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numAppLayers);

    int stagesForMDP = min(sMaxPipesPerMixer, ctx->mOverlay->availablePipes(
            mDpy, Overlay::MIXER_DEFAULT));
    //If MDP has X possible stages, it can take X layers.
    const int batchSize = (numAppLayers - mCurrentFrame.dropCount) -
                (stagesForMDP - 1); //1 for FB

    if(batchSize <= 0) {
        ALOGD_IF(isDebug(), "%s: Not attempting", __FUNCTION__);
        return false;
    }

    int minBatchStart = -1;
    int minBatchEnd = -1;
    size_t minBatchPixelCount = SIZE_MAX;

    /* Iterate through the layer list to find a contiguous batch of batchSize
     * non-dropped layers with the lowest pixel count */
    for(int i = 0; i <= (numAppLayers - batchSize); i++) {
        if(mCurrentFrame.drop[i])
            continue;

        int batchCount = batchSize;
        uint32_t batchPixelCount = 0;
        int j = i;
        for(; j < numAppLayers && batchCount; j++){
            if(!mCurrentFrame.drop[j]) {
                hwc_layer_1_t* layer = &list->hwLayers[j];
                hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
                hwc_rect_t dst = layer->displayFrame;

                /* If we have a valid ROI, count pixels only for the MDP fetched
                 * region of the buffer */
                if((ctx->listStats[mDpy].roi.w != ctx->dpyAttr[mDpy].xres) ||
                        (ctx->listStats[mDpy].roi.h != ctx->dpyAttr[mDpy].yres)) {
                    hwc_rect_t roi;
                    roi.left = ctx->listStats[mDpy].roi.x;
                    roi.top = ctx->listStats[mDpy].roi.y;
                    roi.right = roi.left + ctx->listStats[mDpy].roi.w;
                    roi.bottom = roi.top + ctx->listStats[mDpy].roi.h;

                    /* valid ROI means no scaling layer is composed. So check
                     * only intersection to find actual fetched pixels */
                    crop = getIntersection(roi, dst);
                }

                batchPixelCount += (crop.right - crop.left) *
                    (crop.bottom - crop.top);
                batchCount--;
            }
        }

        /* we dont want to program any batch of size lesser than batchSize */
        if(!batchCount && (batchPixelCount < minBatchPixelCount)) {
            minBatchPixelCount = batchPixelCount;
            minBatchStart = i;
            minBatchEnd = j-1;
        }
    }

    if(minBatchStart < 0) {
        ALOGD_IF(isDebug(), "%s: No batch found batchSize %d numAppLayers %d",
                __FUNCTION__, batchSize, numAppLayers);
        return false;
    }

    /* non-dropped layers falling outside the selected batch will be marked
     * for MDP */
    for(int i = 0; i < numAppLayers; i++) {
        if((i < minBatchStart || i > minBatchEnd) && !mCurrentFrame.drop[i] ) {
            hwc_layer_1_t* layer = &list->hwLayers[i];
            if(not isSupportedForMDPComp(ctx, layer)) {
                ALOGD_IF(isDebug(), "%s: MDP unsupported layer found at %d",
                        __FUNCTION__, i);
                reset(ctx);
                return false;
            }
            mCurrentFrame.isFBComposed[i] = false;
        }
    }

    mCurrentFrame.fbZ = minBatchStart;
    mCurrentFrame.fbCount = batchSize;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount -
            mCurrentFrame.dropCount;

    ALOGD_IF(isDebug(), "%s: fbZ %d batchSize %d fbStart: %d fbEnd: %d",
                __FUNCTION__, mCurrentFrame.fbZ, batchSize, minBatchStart,
                minBatchEnd);

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

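/* Load-based partial composition that favours MDP: estimates, from the
 * remaining bandwidth budget, how many full-screen layers MDP could fetch
 * and sends only the topmost layers beyond that budget to GLES. As a rough
 * illustrative calculation (numbers are hypothetical, not from the source):
 * a 1920x1080 panel with a 16.67 ms vsync period gives panelRefRate ~= 60,
 * so one full-screen 4-byte-per-pixel layer costs about
 * 1920 * 1080 * 4 * 60 ~= 0.5 GB/s; with bwLeft = 2.5 GB/s that allows
 * roughly 5 full-screen layers. */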
bool MDPComp::loadBasedCompPreferMDP(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    if(not isLoadBasedCompDoable(ctx, list)) {
        return false;
    }

    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numAppLayers);

    //Full screen is from ib perspective, not actual full screen
    const int bpp = 4;
    double panelRefRate =
                1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;

    double bwLeft = sMaxBw - sBwClaimed;

    const int fullScreenLayers = bwLeft * 1000000000 / (ctx->dpyAttr[mDpy].xres
            * ctx->dpyAttr[mDpy].yres * bpp * panelRefRate);

    const int fbBatchSize = (numAppLayers - mCurrentFrame.dropCount)
            - (fullScreenLayers - 1);

    //If batch size is not at least 2, we aren't really preferring MDP, since
    //only 1 layer going to GPU could actually translate into an entire FB
    //needed to be fetched by MDP, thus needing more b/w rather than less.
    if(fbBatchSize < 2 || fbBatchSize > numAppLayers) {
        ALOGD_IF(isDebug(), "%s: Not attempting", __FUNCTION__);
        return false;
    }

    //Find top fbBatchSize non-dropped layers to get your batch
    int fbStart = -1, fbEnd = -1, batchCount = fbBatchSize;
    for(int i = numAppLayers - 1; i >= 0; i--) {
        if(mCurrentFrame.drop[i])
            continue;

        if(fbEnd < 0)
            fbEnd = i;

        if(!(--batchCount)) {
            fbStart = i;
            break;
        }
    }

    //Bottom layers constitute MDP batch
    for(int i = 0; i < fbStart; i++) {
        if((i < fbStart || i > fbEnd) && !mCurrentFrame.drop[i] ) {
            hwc_layer_1_t* layer = &list->hwLayers[i];
            if(not isSupportedForMDPComp(ctx, layer)) {
                ALOGD_IF(isDebug(), "%s: MDP unsupported layer found at %d",
                        __FUNCTION__, i);
                reset(ctx);
                return false;
            }
            mCurrentFrame.isFBComposed[i] = false;
        }
    }

    mCurrentFrame.fbZ = fbStart;
    mCurrentFrame.fbCount = fbBatchSize;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount
            - mCurrentFrame.dropCount;

    ALOGD_IF(isDebug(), "%s: FB Z %d, app layers %d, non-dropped layers: %d, "
            "MDP Batch Size %d",__FUNCTION__, mCurrentFrame.fbZ, numAppLayers,
            numAppLayers - mCurrentFrame.dropCount, mCurrentFrame.mdpCount);

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

bool MDPComp::isLoadBasedCompDoable(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    if(mDpy or isSecurePresent(ctx, mDpy) or
            isYuvPresent(ctx, mDpy)) {
        return false;
    }
    return true;
}

bool MDPComp::tryVideoOnly(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    const bool secureOnly = true;
    return videoOnlyComp(ctx, list, not secureOnly) or
            videoOnlyComp(ctx, list, secureOnly);
}

bool MDPComp::videoOnlyComp(hwc_context_t *ctx,
        hwc_display_contents_1_t* list, bool secureOnly) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    mCurrentFrame.reset(numAppLayers);
    updateYUV(ctx, list, secureOnly);
    int mdpCount = mCurrentFrame.mdpCount;

    if(!isYuvPresent(ctx, mDpy) or (mdpCount == 0)) {
        reset(ctx);
        return false;
    }

    /* Bail out if we are processing only secured video layers
     * and we dont have any */
    if(!isSecurePresent(ctx, mDpy) && secureOnly){
        reset(ctx);
        return false;
    }

    if(mCurrentFrame.fbCount)
        mCurrentFrame.fbZ = mCurrentFrame.mdpCount;

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

/* Checks for conditions where YUV layers cannot be bypassed */
bool MDPComp::isYUVDoable(hwc_context_t* ctx, hwc_layer_1_t* layer) {
    if(isSkipLayer(layer)) {
        ALOGD_IF(isDebug(), "%s: Video marked SKIP dpy %d", __FUNCTION__, mDpy);
        return false;
    }

    if(layer->transform & HWC_TRANSFORM_ROT_90 && !canUseRotator(ctx,mDpy)) {
        ALOGD_IF(isDebug(), "%s: no free DMA pipe",__FUNCTION__);
        return false;
    }

    if(isSecuring(ctx, layer)) {
        ALOGD_IF(isDebug(), "%s: MDP securing is active", __FUNCTION__);
        return false;
    }

    if(!isValidDimension(ctx, layer)) {
        ALOGD_IF(isDebug(), "%s: Buffer is of invalid width",
            __FUNCTION__);
        return false;
    }

    if(layer->planeAlpha < 0xFF) {
        ALOGD_IF(isDebug(), "%s: Cannot handle YUV layer with plane alpha\
                 in video only mode",
                 __FUNCTION__);
        return false;
    }

    return true;
}

/* Starts at fromIndex and, for each cached layer till the end of the batch,
 * checks whether it overlaps any updating layer above it in z-order.
 * Returns false if it finds any such intersection. */
bool MDPComp::canPushBatchToTop(const hwc_display_contents_1_t* list,
        int fromIndex, int toIndex) {
    for(int i = fromIndex; i < toIndex; i++) {
        if(mCurrentFrame.isFBComposed[i] && !mCurrentFrame.drop[i]) {
            if(intersectingUpdatingLayers(list, i+1, toIndex, i)) {
                return false;
            }
        }
    }
    return true;
}

/* Checks if the layer at targetLayerIndex intersects any of the updating
 * layers between fromIndex and toIndex. Returns true if it finds an
 * intersection */
bool MDPComp::intersectingUpdatingLayers(const hwc_display_contents_1_t* list,
        int fromIndex, int toIndex, int targetLayerIndex) {
    for(int i = fromIndex; i <= toIndex; i++) {
        if(!mCurrentFrame.isFBComposed[i]) {
            if(areLayersIntersecting(&list->hwLayers[i],
                        &list->hwLayers[targetLayerIndex])) {
                return true;
            }
        }
    }
    return false;
}

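/* Scans the layer list for the largest contiguous batch of cached
 * (non-updating, non-dropped) layers that can stay in the framebuffer,
 * pushing the batch above intervening updating layers when they do not
 * overlap. Returns the z-order the framebuffer target should take and
 * reports the batch bounds through the out parameters. */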
int MDPComp::getBatch(hwc_display_contents_1_t* list,
        int& maxBatchStart, int& maxBatchEnd,
        int& maxBatchCount) {
    int i = 0;
    int fbZOrder =-1;
    while (i < mCurrentFrame.layerCount) {
        int batchCount = 0;
        int batchStart = i;
        int batchEnd = i;
        int fbZ = batchStart;
        int firstZReverseIndex = -1;
        int updatingLayersAbove = 0;//Updating layer count in middle of batch
        while(i < mCurrentFrame.layerCount) {
            if(!mCurrentFrame.isFBComposed[i]) {
                if(!batchCount) {
                    i++;
                    break;
                }
                updatingLayersAbove++;
                i++;
                continue;
            } else {
                if(mCurrentFrame.drop[i]) {
                    i++;
                    continue;
                } else if(updatingLayersAbove <= 0) {
                    batchCount++;
                    batchEnd = i;
                    i++;
                    continue;
                } else { //Layer is FBComposed, not a drop & updatingLayer > 0

                    // We already have a valid updating layer. If layer-i does
                    // not overlap any updating layer between batch-start and
                    // i, then we can add layer i to the batch.
                    if(!intersectingUpdatingLayers(list, batchStart, i-1, i)) {
                        batchCount++;
                        batchEnd = i;
                        i++;
                        continue;
                    } else if(canPushBatchToTop(list, batchStart, i)) {
                        //If all the non-updating layers within this batch
                        //have no intersection with the updating layers above
                        //in z-order, then we can safely move the batch to a
                        //higher z-order. Increment fbZ as it is moving up.
                        if( firstZReverseIndex < 0) {
                            firstZReverseIndex = i;
                        }
                        batchCount++;
                        batchEnd = i;
                        fbZ += updatingLayersAbove;
                        i++;
                        updatingLayersAbove = 0;
                        continue;
                    } else {
                        //Both failed. Start the loop again from here.
                        if(firstZReverseIndex >= 0) {
                            i = firstZReverseIndex;
                        }
                        break;
                    }
                }
            }
        }
        if(batchCount > maxBatchCount) {
            maxBatchCount = batchCount;
            maxBatchStart = batchStart;
            maxBatchEnd = batchEnd;
            fbZOrder = fbZ;
        }
    }
    return fbZOrder;
}

bool MDPComp::markLayersForCaching(hwc_context_t* ctx,
        hwc_display_contents_1_t* list) {
    /* Idea is to keep as many non-updating(cached) layers in FB and
     * send rest of them through MDP. This is done in 2 steps.
     *   1. Find the maximum contiguous batch of non-updating layers.
     *   2. See if we can improve this batch size for caching by adding
     *      opaque layers around the batch, if they don't have
     *      any overlapping with the updating layers in between.
     * NEVER mark an updating layer for caching.
     * But cached ones can be marked for MDP */

    int maxBatchStart = -1;
    int maxBatchEnd = -1;
    int maxBatchCount = 0;
    int fbZ = -1;

    /* All or Nothing is cached. No batching needed */
    if(!mCurrentFrame.fbCount) {
        mCurrentFrame.fbZ = -1;
        return true;
    }
    if(!mCurrentFrame.mdpCount) {
        mCurrentFrame.fbZ = 0;
        return true;
    }

    fbZ = getBatch(list, maxBatchStart, maxBatchEnd, maxBatchCount);

    /* reset rest of the layers lying inside ROI for MDP comp */
    for(int i = 0; i < mCurrentFrame.layerCount; i++) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        if((i < maxBatchStart || i > maxBatchEnd) &&
                mCurrentFrame.isFBComposed[i]){
            if(!mCurrentFrame.drop[i]){
                //If an unsupported layer is being attempted to
                //be pulled out we should fail
                if(not isSupportedForMDPComp(ctx, layer)) {
                    return false;
                }
                mCurrentFrame.isFBComposed[i] = false;
            }
        }
    }

    // update the frame data
    mCurrentFrame.fbZ = fbZ;
    mCurrentFrame.fbCount = maxBatchCount;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount -
            mCurrentFrame.fbCount - mCurrentFrame.dropCount;

    ALOGD_IF(isDebug(),"%s: cached count: %d",__FUNCTION__,
            mCurrentFrame.fbCount);

    return true;
}

void MDPComp::updateLayerCache(hwc_context_t* ctx,
        hwc_display_contents_1_t* list) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    int fbCount = 0;

    for(int i = 0; i < numAppLayers; i++) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        if (mCachedFrame.hnd[i] == list->hwLayers[i].handle) {
            if(!mCurrentFrame.drop[i])
                fbCount++;
            mCurrentFrame.isFBComposed[i] = true;
        } else {
            mCurrentFrame.isFBComposed[i] = false;
        }
    }

    mCurrentFrame.fbCount = fbCount;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount
            - mCurrentFrame.dropCount;

    ALOGD_IF(isDebug(),"%s: MDP count: %d FB count %d drop count: %d"
            ,__FUNCTION__, mCurrentFrame.mdpCount, mCurrentFrame.fbCount,
            mCurrentFrame.dropCount);
}

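/* Moves YUV layers that pass isYUVDoable() to MDP composition; when
 * secureOnly is set, only secure YUV buffers are pulled out of the
 * framebuffer. YUV layers that are not doable are pushed back to FB
 * composition. */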
void MDPComp::updateYUV(hwc_context_t* ctx, hwc_display_contents_1_t* list,
        bool secureOnly) {
    int nYuvCount = ctx->listStats[mDpy].yuvCount;
    for(int index = 0;index < nYuvCount; index++){
        int nYuvIndex = ctx->listStats[mDpy].yuvIndices[index];
        hwc_layer_1_t* layer = &list->hwLayers[nYuvIndex];

        if(!isYUVDoable(ctx, layer)) {
            if(!mCurrentFrame.isFBComposed[nYuvIndex]) {
                mCurrentFrame.isFBComposed[nYuvIndex] = true;
                mCurrentFrame.fbCount++;
            }
        } else {
            if(mCurrentFrame.isFBComposed[nYuvIndex]) {
                private_handle_t *hnd = (private_handle_t *)layer->handle;
                if(!secureOnly || isSecureBuffer(hnd)) {
                    mCurrentFrame.isFBComposed[nYuvIndex] = false;
                    mCurrentFrame.fbCount--;
                }
            }
        }
    }

    mCurrentFrame.mdpCount = mCurrentFrame.layerCount -
            mCurrentFrame.fbCount - mCurrentFrame.dropCount;
    ALOGD_IF(isDebug(),"%s: fb count: %d",__FUNCTION__,
            mCurrentFrame.fbCount);
}

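/* Common validation and programming done after a composition strategy has
 * picked the MDP/FB split: resource and HW-limitation checks, framebuffer
 * preparation at fbZ, pipe allocation, and per-layer overlay configuration
 * with increasing z-order (4k2k YUV split layers take an extra z slot). On
 * success it also updates the redraw flag. */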
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001196bool MDPComp::postHeuristicsHandling(hwc_context_t *ctx,
1197 hwc_display_contents_1_t* list) {
1198
1199 //Capability checks
1200 if(!resourceCheck(ctx, list)) {
1201 ALOGD_IF(isDebug(), "%s: resource check failed", __FUNCTION__);
1202 return false;
1203 }
1204
1205 //Limitations checks
1206 if(!hwLimitationsCheck(ctx, list)) {
1207 ALOGD_IF(isDebug(), "%s: HW limitations",__FUNCTION__);
1208 return false;
1209 }
1210
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001211 //Configure framebuffer first if applicable
1212 if(mCurrentFrame.fbZ >= 0) {
1213 if(!ctx->mFBUpdate[mDpy]->prepare(ctx, list, mCurrentFrame.fbZ)) {
1214 ALOGD_IF(isDebug(), "%s configure framebuffer failed",
1215 __FUNCTION__);
1216 return false;
1217 }
1218 }
1219
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001220 mCurrentFrame.map();
1221
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001222 if(!allocLayerPipes(ctx, list)) {
1223 ALOGD_IF(isDebug(), "%s: Unable to allocate MDP pipes", __FUNCTION__);
Saurabh Shahaa236822013-04-24 18:07:26 -07001224 return false;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001225 }
1226
1227 for (int index = 0, mdpNextZOrder = 0; index < mCurrentFrame.layerCount;
Saurabh Shahaa236822013-04-24 18:07:26 -07001228 index++) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001229 if(!mCurrentFrame.isFBComposed[index]) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001230 int mdpIndex = mCurrentFrame.layerToMDP[index];
1231 hwc_layer_1_t* layer = &list->hwLayers[index];
1232
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301233 //Leave fbZ for framebuffer. CACHE/GLES layers go here.
1234 if(mdpNextZOrder == mCurrentFrame.fbZ) {
1235 mdpNextZOrder++;
1236 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001237 MdpPipeInfo* cur_pipe = mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
1238 cur_pipe->zOrder = mdpNextZOrder++;
1239
radhakrishnac9a67412013-09-25 17:40:42 +05301240 private_handle_t *hnd = (private_handle_t *)layer->handle;
1241 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1242 if(configure4k2kYuv(ctx, layer,
1243 mCurrentFrame.mdpToLayer[mdpIndex])
1244 != 0 ){
1245 ALOGD_IF(isDebug(), "%s: Failed to configure split pipes \
1246 for layer %d",__FUNCTION__, index);
1247 return false;
1248 }
1249 else{
1250 mdpNextZOrder++;
1251 }
1252 continue;
1253 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001254 if(configure(ctx, layer, mCurrentFrame.mdpToLayer[mdpIndex]) != 0 ){
1255 ALOGD_IF(isDebug(), "%s: Failed to configure overlay for \
radhakrishnac9a67412013-09-25 17:40:42 +05301256 layer %d",__FUNCTION__, index);
Saurabh Shahaa236822013-04-24 18:07:26 -07001257 return false;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001258 }
Saurabh Shahaa236822013-04-24 18:07:26 -07001259 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001260 }
1261
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001262 setRedraw(ctx, list);
Saurabh Shahaa236822013-04-24 18:07:26 -07001263 return true;
1264}
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001265
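/*
 * Checks that the frame fits within the per-mixer pipe limit, that enough
 * pipes are free, and that the required bandwidth is available.
 */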
Saurabh Shah173f4242013-11-20 09:50:12 -08001266bool MDPComp::resourceCheck(hwc_context_t *ctx,
1267 hwc_display_contents_1_t *list) {
1268 const bool fbUsed = mCurrentFrame.fbCount;
1269 if(mCurrentFrame.mdpCount > sMaxPipesPerMixer - fbUsed) {
1270 ALOGD_IF(isDebug(), "%s: Exceeds MAX_PIPES_PER_MIXER",__FUNCTION__);
1271 return false;
1272 }
1273
1274 if(!arePipesAvailable(ctx, list)) {
1275 return false;
1276 }
1277
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001278 double size = calcMDPBytesRead(ctx, list);
Saurabh Shah173f4242013-11-20 09:50:12 -08001279 if(!bandwidthCheck(ctx, size)) {
1280 ALOGD_IF(isDebug(), "%s: Exceeds bandwidth",__FUNCTION__);
1281 return false;
1282 }
1283
1284 return true;
1285}
1286
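/*
 * Estimates the data (in GB) MDP reads to compose one frame: for each MDP
 * layer, bytes-per-pixel times the source crop area, scaled by the ratio of
 * panel height to destination height; plus one RGBA8888 framebuffer read
 * when any layers are left for GPU composition.
 */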
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001287double MDPComp::calcMDPBytesRead(hwc_context_t *ctx,
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001288 hwc_display_contents_1_t* list) {
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001289 double size = 0;
1290 const double GIG = 1000000000.0;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001291
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001292 //Skip for targets where no device tree value for bw is supplied
1293 if(sMaxBw <= 0.0) {
1294 return 0.0;
1295 }
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001296
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001297 for (uint32_t i = 0; i < list->numHwLayers - 1; i++) {
1298 if(!mCurrentFrame.isFBComposed[i]) {
1299 hwc_layer_1_t* layer = &list->hwLayers[i];
1300 private_handle_t *hnd = (private_handle_t *)layer->handle;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001301 if (hnd) {
Saurabh Shah62e1d732013-09-17 10:44:05 -07001302 hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
Saurabh Shah90789162013-09-16 10:29:20 -07001303 hwc_rect_t dst = layer->displayFrame;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001304 float bpp = ((float)hnd->size) / (hnd->width * hnd->height);
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001305 size += (bpp * (crop.right - crop.left) *
1306 (crop.bottom - crop.top) *
1307 ctx->dpyAttr[mDpy].yres / (dst.bottom - dst.top)) /
1308 GIG;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001309 }
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001310 }
1311 }
1312
1313 if(mCurrentFrame.fbCount) {
1314 hwc_layer_1_t* layer = &list->hwLayers[list->numHwLayers - 1];
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001315 int tempw, temph;
1316 size += (getBufferSizeAndDimensions(
1317 layer->displayFrame.right - layer->displayFrame.left,
1318 layer->displayFrame.bottom - layer->displayFrame.top,
1319 HAL_PIXEL_FORMAT_RGBA_8888,
1320 tempw, temph)) / GIG;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001321 }
1322
1323 return size;
1324}
1325
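/*
 * Compares the frame's estimated read size, scaled by the panel refresh
 * rate, against the bandwidth still unclaimed out of the device tree limit.
 */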
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001326bool MDPComp::bandwidthCheck(hwc_context_t *ctx, const double& size) {
1327 //Skip for targets where no device tree value for bw is supplied
1328 if(sMaxBw <= 0.0) {
1329 return true;
1330 }
1331
1332 double panelRefRate =
1333 1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;
1334 if((size * panelRefRate) > (sMaxBw - sBwClaimed)) {
1335 return false;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001336 }
1337 return true;
1338}
1339
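/*
 * Rejects frames that hit known hardware restrictions: alpha scaling on
 * pre-MDSS targets, and overlapping downscaled layers on 8x26/8974v2.
 */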
Prabhanjan Kandula21918db2013-11-26 15:51:58 +05301340bool MDPComp::hwLimitationsCheck(hwc_context_t* ctx,
1341 hwc_display_contents_1_t* list) {
1342
1343 //A-family hw limitation:
1344    //If a layer needs alpha scaling, MDP cannot support it.
1345 if(ctx->mMDP.version < qdutils::MDSS_V5) {
1346 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1347 if(!mCurrentFrame.isFBComposed[i] &&
1348 isAlphaScaled( &list->hwLayers[i])) {
1349 ALOGD_IF(isDebug(), "%s:frame needs alphaScaling",__FUNCTION__);
1350 return false;
1351 }
1352 }
1353 }
1354
1355 // On 8x26 & 8974 hw, we have a limitation of downscaling+blending.
1356    //If multiple layers require downscaling and they also overlap,
1357    //fall back to GPU since MDSS cannot handle it.
1358 if(qdutils::MDPVersion::getInstance().is8x74v2() ||
1359 qdutils::MDPVersion::getInstance().is8x26()) {
1360 for(int i = 0; i < mCurrentFrame.layerCount-1; ++i) {
1361 hwc_layer_1_t* botLayer = &list->hwLayers[i];
1362 if(!mCurrentFrame.isFBComposed[i] &&
1363 isDownscaleRequired(botLayer)) {
1364 //if layer-i is marked for MDP and needs downscaling
1365 //check if any MDP layer on top of i & overlaps with layer-i
1366 for(int j = i+1; j < mCurrentFrame.layerCount; ++j) {
1367 hwc_layer_1_t* topLayer = &list->hwLayers[j];
1368 if(!mCurrentFrame.isFBComposed[j] &&
1369 isDownscaleRequired(topLayer)) {
1370 hwc_rect_t r = getIntersection(botLayer->displayFrame,
1371 topLayer->displayFrame);
1372 if(isValidRect(r))
1373 return false;
1374 }
1375 }
1376 }
1377 }
1378 }
1379 return true;
1380}
1381
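/*
 * Per-display composition decision: tries full MDP composition, then
 * video-only composition, marks the layers accordingly and accounts the
 * bandwidth claimed by this display for the current frame.
 */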
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001382int MDPComp::prepare(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001383 int ret = 0;
Saurabh Shahaa236822013-04-24 18:07:26 -07001384 const int numLayers = ctx->listStats[mDpy].numAppLayers;
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001385 MDPVersion& mdpVersion = qdutils::MDPVersion::getInstance();
Ramkumar Radhakrishnanc5893f12013-06-06 19:43:53 -07001386
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001387    //If the number of app layers exceeds MAX_NUM_APP_LAYERS, fall back to GPU
1388    //and do not cache the information for the next draw cycle.
1389 if(numLayers > MAX_NUM_APP_LAYERS) {
1390 ALOGI("%s: Number of App layers exceeded the limit ",
1391 __FUNCTION__);
1392 mCachedFrame.reset();
1393 return -1;
1394 }
1395
Saurabh Shahb39f8152013-08-22 10:21:44 -07001396 //reset old data
1397 mCurrentFrame.reset(numLayers);
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001398 memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
1399 mCurrentFrame.dropCount = 0;
Prabhanjan Kandula088bd892013-07-02 23:47:13 +05301400
Ramkumar Radhakrishnana70981a2013-08-28 11:33:53 -07001401 // Detect the start of animation and fall back to GPU only once to cache
1402    // all the layers in FB and display FB content until the animation completes.
1403 if(ctx->listStats[mDpy].isDisplayAnimating) {
1404 mCurrentFrame.needsRedraw = false;
1405 if(ctx->mAnimationState[mDpy] == ANIMATION_STOPPED) {
1406 mCurrentFrame.needsRedraw = true;
1407 ctx->mAnimationState[mDpy] = ANIMATION_STARTED;
1408 }
1409 setMDPCompLayerFlags(ctx, list);
1410 mCachedFrame.updateCounts(mCurrentFrame);
1411 ret = -1;
1412 return ret;
1413 } else {
1414 ctx->mAnimationState[mDpy] = ANIMATION_STOPPED;
1415 }
1416
Saurabh Shahb39f8152013-08-22 10:21:44 -07001417 //Hard conditions, if not met, cannot do MDP comp
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001418 if(isFrameDoable(ctx)) {
1419 generateROI(ctx, list);
Saurabh Shahb39f8152013-08-22 10:21:44 -07001420
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001421 //Convert from kbps to gbps
1422 sMaxBw = mdpVersion.getHighBw() / 1000000.0;
1423 if (ctx->mExtDisplay->isConnected() ||
1424 ctx->mMDP.panel != MIPI_CMD_PANEL) {
1425 sMaxBw = mdpVersion.getLowBw() / 1000000.0;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001426 }
1427
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001428 if(tryFullFrame(ctx, list) || tryVideoOnly(ctx, list)) {
1429 setMDPCompLayerFlags(ctx, list);
1430 } else {
1431 reset(ctx);
1432 memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
1433 mCurrentFrame.dropCount = 0;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001434 ret = -1;
Saurabh Shahb39f8152013-08-22 10:21:44 -07001435 }
1436 } else {
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001437 ALOGD_IF( isDebug(),"%s: MDP Comp not possible for this frame",
1438 __FUNCTION__);
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001439 ret = -1;
Saurabh Shahb39f8152013-08-22 10:21:44 -07001440 }
Saurabh Shahb39f8152013-08-22 10:21:44 -07001441
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001442 if(isDebug()) {
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001443 ALOGD("GEOMETRY change: %d",
1444 (list->flags & HWC_GEOMETRY_CHANGED));
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001445 android::String8 sDump("");
1446 dump(sDump);
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001447 ALOGD("%s",sDump.string());
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001448 }
1449
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001450 mCachedFrame.cacheAll(list);
1451 mCachedFrame.updateCounts(mCurrentFrame);
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001452 double panelRefRate =
1453 1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;
1454 sBwClaimed += calcMDPBytesRead(ctx, list) * panelRefRate;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001455 return ret;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001456}
1457
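/*
 * Reserves a pair of VG pipes (left/right halves) for a 4kx2k YUV layer
 * that will be source-split across two pipes.
 */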
radhakrishnac9a67412013-09-25 17:40:42 +05301458bool MDPComp::allocSplitVGPipesfor4k2k(hwc_context_t *ctx,
1459 hwc_display_contents_1_t* list, int index) {
1460
1461 bool bRet = true;
1462 hwc_layer_1_t* layer = &list->hwLayers[index];
1463 private_handle_t *hnd = (private_handle_t *)layer->handle;
1464 int mdpIndex = mCurrentFrame.layerToMDP[index];
1465 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
1466 info.pipeInfo = new MdpYUVPipeInfo;
1467 info.rot = NULL;
1468 MdpYUVPipeInfo& pipe_info = *(MdpYUVPipeInfo*)info.pipeInfo;
1469 ePipeType type = MDPCOMP_OV_VG;
1470
1471 pipe_info.lIndex = ovutils::OV_INVALID;
1472 pipe_info.rIndex = ovutils::OV_INVALID;
1473
1474 pipe_info.lIndex = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
1475 if(pipe_info.lIndex == ovutils::OV_INVALID){
1476 bRet = false;
1477 ALOGD_IF(isDebug(),"%s: allocating first VG pipe failed",
1478 __FUNCTION__);
1479 }
1480 pipe_info.rIndex = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
1481 if(pipe_info.rIndex == ovutils::OV_INVALID){
1482 bRet = false;
1483 ALOGD_IF(isDebug(),"%s: allocating second VG pipe failed",
1484 __FUNCTION__);
1485 }
1486 return bRet;
1487}
Saurabh Shah88e4d272013-09-03 13:31:29 -07001488//=============MDPCompNonSplit===================================================
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001489
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001490void MDPCompNonSplit::adjustForSourceSplit(hwc_context_t *ctx,
radhakrishnac9a67412013-09-25 17:40:42 +05301491 hwc_display_contents_1_t* list){
1492 //As we split 4kx2k yuv layer and program to 2 VG pipes
1493 //(if available) increase mdpcount accordingly
1494 mCurrentFrame.mdpCount += ctx->listStats[mDpy].yuv4k2kCount;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001495
1496 //If 4k2k Yuv layer split is possible, and if
1497 //fbz is above 4k2k layer, increment fb zorder by 1
1498 //as we split 4k2k layer and increment zorder for right half
1499 //of the layer
1500 if(mCurrentFrame.fbZ >= 0) {
1501 int n4k2kYuvCount = ctx->listStats[mDpy].yuv4k2kCount;
1502 for(int index = 0; index < n4k2kYuvCount; index++){
1503 int n4k2kYuvIndex =
1504 ctx->listStats[mDpy].yuv4k2kIndices[index];
1505 if(mCurrentFrame.fbZ > n4k2kYuvIndex){
1506 mCurrentFrame.fbZ += 1;
1507 }
1508 }
1509 }
radhakrishnac9a67412013-09-25 17:40:42 +05301510}
1511
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001512/*
1513 * Configures pipe(s) for MDP composition
1514 */
Saurabh Shah88e4d272013-09-03 13:31:29 -07001515int MDPCompNonSplit::configure(hwc_context_t *ctx, hwc_layer_1_t *layer,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001516 PipeLayerPair& PipeLayerPair) {
Saurabh Shah88e4d272013-09-03 13:31:29 -07001517 MdpPipeInfoNonSplit& mdp_info =
1518 *(static_cast<MdpPipeInfoNonSplit*>(PipeLayerPair.pipeInfo));
Saurabh Shahacf10202013-02-26 10:15:15 -08001519 eMdpFlags mdpFlags = OV_MDP_BACKEND_COMPOSITION;
1520 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1521 eIsFg isFg = IS_FG_OFF;
1522 eDest dest = mdp_info.index;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001523
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001524 ALOGD_IF(isDebug(),"%s: configuring: layer: %p z_order: %d dest_pipe: %d",
1525 __FUNCTION__, layer, zOrder, dest);
1526
Saurabh Shah88e4d272013-09-03 13:31:29 -07001527 return configureNonSplit(ctx, layer, mDpy, mdpFlags, zOrder, isFg, dest,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001528 &PipeLayerPair.rot);
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001529}
1530
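/*
 * Verifies that the default mixer has enough free pipes for the MDP layers
 * (plus one reserved for the framebuffer) and enough VG pipes for YUV layers.
 */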
Saurabh Shah88e4d272013-09-03 13:31:29 -07001531bool MDPCompNonSplit::arePipesAvailable(hwc_context_t *ctx,
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001532 hwc_display_contents_1_t* list) {
1533 overlay::Overlay& ov = *ctx->mOverlay;
1534 int numPipesNeeded = mCurrentFrame.mdpCount;
1535 int availPipes = ov.availablePipes(mDpy, Overlay::MIXER_DEFAULT);
1536
1537 //Reserve pipe for FB
1538 if(mCurrentFrame.fbCount)
1539 availPipes -= 1;
1540
1541 if(numPipesNeeded > availPipes) {
1542 ALOGD_IF(isDebug(), "%s: Insufficient pipes, dpy %d needed %d, avail %d",
1543 __FUNCTION__, mDpy, numPipesNeeded, availPipes);
1544 return false;
1545 }
1546
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001547 if(not areVGPipesAvailable(ctx, list)) {
1548 return false;
1549 }
1550
1551 return true;
1552}
1553
1554bool MDPCompNonSplit::areVGPipesAvailable(hwc_context_t *ctx,
1555 hwc_display_contents_1_t* list) {
1556 overlay::Overlay& ov = *ctx->mOverlay;
1557 int pipesNeeded = 0;
1558 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1559 if(!mCurrentFrame.isFBComposed[i]) {
1560 hwc_layer_1_t* layer = &list->hwLayers[i];
1561 hwc_rect_t dst = layer->displayFrame;
1562 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301563 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1564 pipesNeeded = pipesNeeded + 2;
1565 }
1566 else if(isYuvBuffer(hnd)) {
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001567 pipesNeeded++;
1568 }
1569 }
1570 }
1571
1572 int availableVGPipes = ov.availablePipes(mDpy, ovutils::OV_MDP_PIPE_VG);
1573 if(pipesNeeded > availableVGPipes) {
1574 ALOGD_IF(isDebug(), "%s: Insufficient VG pipes for video layers"
1575                    " dpy %d needed %d, avail %d",
1576 __FUNCTION__, mDpy, pipesNeeded, availableVGPipes);
1577 return false;
1578 }
1579
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001580 return true;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001581}
1582
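/*
 * Acquires an MDP pipe for every layer marked for MDP composition: YUV
 * layers get VG pipes, non-scaled layers may use DMA pipes on MDSS targets.
 */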
Saurabh Shah88e4d272013-09-03 13:31:29 -07001583bool MDPCompNonSplit::allocLayerPipes(hwc_context_t *ctx,
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001584 hwc_display_contents_1_t* list) {
1585 for(int index = 0; index < mCurrentFrame.layerCount; index++) {
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001586
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001587 if(mCurrentFrame.isFBComposed[index]) continue;
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001588
Jeykumar Sankarancf537002013-01-21 21:19:15 -08001589 hwc_layer_1_t* layer = &list->hwLayers[index];
1590 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301591 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1592 if(allocSplitVGPipesfor4k2k(ctx, list, index)){
1593 continue;
1594 }
1595 }
1596
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001597 int mdpIndex = mCurrentFrame.layerToMDP[index];
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001598 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
Saurabh Shah88e4d272013-09-03 13:31:29 -07001599 info.pipeInfo = new MdpPipeInfoNonSplit;
Saurabh Shahacf10202013-02-26 10:15:15 -08001600 info.rot = NULL;
Saurabh Shah88e4d272013-09-03 13:31:29 -07001601 MdpPipeInfoNonSplit& pipe_info = *(MdpPipeInfoNonSplit*)info.pipeInfo;
Jeykumar Sankarana37fdbf2013-03-06 18:59:28 -08001602 ePipeType type = MDPCOMP_OV_ANY;
1603
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001604 if(isYuvBuffer(hnd)) {
1605 type = MDPCOMP_OV_VG;
Prabhanjan Kandula21918db2013-11-26 15:51:58 +05301606 } else if(!qhwc::needsScaling(layer)
Saurabh Shah85234ec2013-04-12 17:09:00 -07001607 && Overlay::getDMAMode() != Overlay::DMA_BLOCK_MODE
1608 && ctx->mMDP.version >= qdutils::MDSS_V5) {
Jeykumar Sankarana37fdbf2013-03-06 18:59:28 -08001609 type = MDPCOMP_OV_DMA;
1610 }
1611
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001612 pipe_info.index = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001613 if(pipe_info.index == ovutils::OV_INVALID) {
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001614 ALOGD_IF(isDebug(), "%s: Unable to get pipe type = %d",
1615 __FUNCTION__, (int) type);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001616 return false;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001617 }
1618 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001619 return true;
1620}
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001621
radhakrishnac9a67412013-09-25 17:40:42 +05301622int MDPCompNonSplit::configure4k2kYuv(hwc_context_t *ctx, hwc_layer_1_t *layer,
1623 PipeLayerPair& PipeLayerPair) {
1624 MdpYUVPipeInfo& mdp_info =
1625 *(static_cast<MdpYUVPipeInfo*>(PipeLayerPair.pipeInfo));
1626 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1627 eIsFg isFg = IS_FG_OFF;
1628 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1629 eDest lDest = mdp_info.lIndex;
1630 eDest rDest = mdp_info.rIndex;
1631
1632 return configureSourceSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg,
1633 lDest, rDest, &PipeLayerPair.rot);
1634}
1635
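/*
 * Queues buffers to the pipes programmed during prepare, routing through
 * the rotator when one is assigned and playing both halves of a
 * source-split 4kx2k YUV layer.
 */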
Saurabh Shah88e4d272013-09-03 13:31:29 -07001636bool MDPCompNonSplit::draw(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001637
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001638 if(!isEnabled()) {
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001639 ALOGD_IF(isDebug(),"%s: MDP Comp not configured", __FUNCTION__);
1640 return true;
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -08001641 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001642
1643 if(!ctx || !list) {
1644        ALOGE("%s: invalid context or list",__FUNCTION__);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001645 return false;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001646 }
1647
Prabhanjan Kandula08222fc2013-07-10 17:20:59 +05301648 if(ctx->listStats[mDpy].numAppLayers > MAX_NUM_APP_LAYERS) {
1649 ALOGD_IF(isDebug(),"%s: Exceeding max layer count", __FUNCTION__);
1650 return true;
1651 }
1652
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001653 /* reset Invalidator */
Saurabh Shah2d998a92013-05-14 17:55:58 -07001654 if(idleInvalidator && !sIdleFallBack && mCurrentFrame.mdpCount)
Saurabh Shahc2f04cf2014-01-23 18:39:01 -08001655 idleInvalidator->handleUpdateEvent();
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001656
1657 overlay::Overlay& ov = *ctx->mOverlay;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001658 LayerProp *layerProp = ctx->layerProp[mDpy];
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001659
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001660 int numHwLayers = ctx->listStats[mDpy].numAppLayers;
1661 for(int i = 0; i < numHwLayers && mCurrentFrame.mdpCount; i++ )
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001662 {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001663 if(mCurrentFrame.isFBComposed[i]) continue;
1664
Naseer Ahmed5b6708a2012-08-02 13:46:08 -07001665 hwc_layer_1_t *layer = &list->hwLayers[i];
Saurabh Shahacf10202013-02-26 10:15:15 -08001666 private_handle_t *hnd = (private_handle_t *)layer->handle;
1667 if(!hnd) {
Sushil Chauhan897a9c32013-07-18 11:09:55 -07001668 if (!(layer->flags & HWC_COLOR_FILL)) {
1669 ALOGE("%s handle null", __FUNCTION__);
1670 return false;
1671 }
1672 // No PLAY for Color layer
1673 layerProp[i].mFlags &= ~HWC_MDPCOMP;
1674 continue;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001675 }
1676
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001677 int mdpIndex = mCurrentFrame.layerToMDP[i];
1678
radhakrishnac9a67412013-09-25 17:40:42 +05301679 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit)
1680 {
1681 MdpYUVPipeInfo& pipe_info =
1682 *(MdpYUVPipeInfo*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
1683 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
1684 ovutils::eDest indexL = pipe_info.lIndex;
1685 ovutils::eDest indexR = pipe_info.rIndex;
1686 int fd = hnd->fd;
1687 uint32_t offset = hnd->offset;
1688 if(rot) {
1689 rot->queueBuffer(fd, offset);
1690 fd = rot->getDstMemId();
1691 offset = rot->getDstOffset();
1692 }
1693 if(indexL != ovutils::OV_INVALID) {
1694 ovutils::eDest destL = (ovutils::eDest)indexL;
1695 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1696 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
1697 if (!ov.queueBuffer(fd, offset, destL)) {
1698 ALOGE("%s: queueBuffer failed for display:%d",
1699 __FUNCTION__, mDpy);
1700 return false;
1701 }
1702 }
1703
1704 if(indexR != ovutils::OV_INVALID) {
1705 ovutils::eDest destR = (ovutils::eDest)indexR;
1706 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1707 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
1708 if (!ov.queueBuffer(fd, offset, destR)) {
1709 ALOGE("%s: queueBuffer failed for display:%d",
1710 __FUNCTION__, mDpy);
1711 return false;
1712 }
1713 }
1714 }
1715 else{
1716 MdpPipeInfoNonSplit& pipe_info =
Saurabh Shah88e4d272013-09-03 13:31:29 -07001717 *(MdpPipeInfoNonSplit*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
radhakrishnac9a67412013-09-25 17:40:42 +05301718 ovutils::eDest dest = pipe_info.index;
1719 if(dest == ovutils::OV_INVALID) {
1720 ALOGE("%s: Invalid pipe index (%d)", __FUNCTION__, dest);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001721 return false;
radhakrishnac9a67412013-09-25 17:40:42 +05301722 }
Saurabh Shahacf10202013-02-26 10:15:15 -08001723
radhakrishnac9a67412013-09-25 17:40:42 +05301724 if(!(layerProp[i].mFlags & HWC_MDPCOMP)) {
1725 continue;
1726 }
1727
1728 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1729 using pipe: %d", __FUNCTION__, layer,
1730 hnd, dest );
1731
1732 int fd = hnd->fd;
1733 uint32_t offset = hnd->offset;
1734
1735 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
1736 if(rot) {
1737 if(!rot->queueBuffer(fd, offset))
1738 return false;
1739 fd = rot->getDstMemId();
1740 offset = rot->getDstOffset();
1741 }
1742
1743 if (!ov.queueBuffer(fd, offset, dest)) {
1744 ALOGE("%s: queueBuffer failed for display:%d ",
1745 __FUNCTION__, mDpy);
1746 return false;
1747 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001748 }
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001749
1750 layerProp[i].mFlags &= ~HWC_MDPCOMP;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001751 }
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001752 return true;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001753}
1754
Saurabh Shah88e4d272013-09-03 13:31:29 -07001755//=============MDPCompSplit===================================================
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001756
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001757void MDPCompSplit::adjustForSourceSplit(hwc_context_t *ctx,
radhakrishnac9a67412013-09-25 17:40:42 +05301758 hwc_display_contents_1_t* list){
1759    //If a 4kx2k yuv layer is present entirely in either the left half
1760    //or the right half, try splitting the yuv layer to avoid decimation
1761 int n4k2kYuvCount = ctx->listStats[mDpy].yuv4k2kCount;
1762 const int lSplit = getLeftSplit(ctx, mDpy);
1763 for(int index = 0; index < n4k2kYuvCount; index++){
1764 int n4k2kYuvIndex = ctx->listStats[mDpy].yuv4k2kIndices[index];
1765 hwc_layer_1_t* layer = &list->hwLayers[n4k2kYuvIndex];
1766 hwc_rect_t dst = layer->displayFrame;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001767 if((dst.left > lSplit) || (dst.right < lSplit)) {
radhakrishnac9a67412013-09-25 17:40:42 +05301768 mCurrentFrame.mdpCount += 1;
1769 }
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001770 if(mCurrentFrame.fbZ > n4k2kYuvIndex){
1771 mCurrentFrame.fbZ += 1;
1772 }
radhakrishnac9a67412013-09-25 17:40:42 +05301773 }
1774}
1775
Saurabh Shah88e4d272013-09-03 13:31:29 -07001776int MDPCompSplit::pipesNeeded(hwc_context_t *ctx,
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001777 hwc_display_contents_1_t* list,
1778 int mixer) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001779 int pipesNeeded = 0;
Saurabh Shah67a38c32013-06-10 16:23:15 -07001780 const int xres = ctx->dpyAttr[mDpy].xres;
Saurabh Shah07a8ca82013-08-06 18:45:42 -07001781
1782 const int lSplit = getLeftSplit(ctx, mDpy);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001783
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001784 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1785 if(!mCurrentFrame.isFBComposed[i]) {
1786 hwc_layer_1_t* layer = &list->hwLayers[i];
1787 hwc_rect_t dst = layer->displayFrame;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001788 if(mixer == Overlay::MIXER_LEFT && dst.left < lSplit) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001789 pipesNeeded++;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001790 } else if(mixer == Overlay::MIXER_RIGHT && dst.right > lSplit) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001791 pipesNeeded++;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001792 }
1793 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001794 }
1795 return pipesNeeded;
1796}
1797
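/*
 * Checks pipe availability per mixer (left/right) as well as for the whole
 * display, since an unused pipe can get counted against both mixers.
 */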
Saurabh Shah88e4d272013-09-03 13:31:29 -07001798bool MDPCompSplit::arePipesAvailable(hwc_context_t *ctx,
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001799 hwc_display_contents_1_t* list) {
1800 overlay::Overlay& ov = *ctx->mOverlay;
Saurabh Shah082468e2013-09-12 10:05:32 -07001801 int totalPipesNeeded = 0;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001802
1803 for(int i = 0; i < Overlay::MIXER_MAX; i++) {
1804 int numPipesNeeded = pipesNeeded(ctx, list, i);
1805 int availPipes = ov.availablePipes(mDpy, i);
1806
1807 //Reserve pipe(s)for FB
1808        //Reserve pipe(s) for FB
Saurabh Shah082468e2013-09-12 10:05:32 -07001809 numPipesNeeded += 1;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001810
Saurabh Shah082468e2013-09-12 10:05:32 -07001811 totalPipesNeeded += numPipesNeeded;
1812
1813 //Per mixer check.
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001814 if(numPipesNeeded > availPipes) {
1815 ALOGD_IF(isDebug(), "%s: Insufficient pipes for "
1816 "dpy %d mixer %d needed %d, avail %d",
1817 __FUNCTION__, mDpy, i, numPipesNeeded, availPipes);
1818 return false;
1819 }
1820 }
Saurabh Shah082468e2013-09-12 10:05:32 -07001821
1822 //Per display check, since unused pipes can get counted twice.
1823 int totalPipesAvailable = ov.availablePipes(mDpy);
1824 if(totalPipesNeeded > totalPipesAvailable) {
1825 ALOGD_IF(isDebug(), "%s: Insufficient pipes for "
1826 "dpy %d needed %d, avail %d",
1827 __FUNCTION__, mDpy, totalPipesNeeded, totalPipesAvailable);
1828 return false;
1829 }
1830
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001831 if(not areVGPipesAvailable(ctx, list)) {
1832 return false;
1833 }
1834
1835 return true;
1836}
1837
1838bool MDPCompSplit::areVGPipesAvailable(hwc_context_t *ctx,
1839 hwc_display_contents_1_t* list) {
1840 overlay::Overlay& ov = *ctx->mOverlay;
1841 int pipesNeeded = 0;
1842 const int lSplit = getLeftSplit(ctx, mDpy);
1843 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1844 if(!mCurrentFrame.isFBComposed[i]) {
1845 hwc_layer_1_t* layer = &list->hwLayers[i];
1846 hwc_rect_t dst = layer->displayFrame;
1847 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301848 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1849 if((dst.left > lSplit)||(dst.right < lSplit)){
1850 pipesNeeded = pipesNeeded + 2;
1851 continue;
1852 }
1853 }
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001854 if(isYuvBuffer(hnd)) {
1855 if(dst.left < lSplit) {
1856 pipesNeeded++;
1857 }
1858 if(dst.right > lSplit) {
1859 pipesNeeded++;
1860 }
1861 }
1862 }
1863 }
1864
1865 int availableVGPipes = ov.availablePipes(mDpy, ovutils::OV_MDP_PIPE_VG);
1866 if(pipesNeeded > availableVGPipes) {
1867 ALOGD_IF(isDebug(), "%s: Insufficient VG pipes for video layers"
1868                    " dpy %d needed %d, avail %d",
1869 __FUNCTION__, mDpy, pipesNeeded, availableVGPipes);
1870 return false;
1871 }
1872
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001873 return true;
1874}
1875
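/*
 * Grabs a pipe on the left and/or right mixer depending on which halves of
 * the panel the layer's destination rect spans.
 */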
Saurabh Shah88e4d272013-09-03 13:31:29 -07001876bool MDPCompSplit::acquireMDPPipes(hwc_context_t *ctx, hwc_layer_1_t* layer,
1877 MdpPipeInfoSplit& pipe_info,
Saurabh Shah67a38c32013-06-10 16:23:15 -07001878 ePipeType type) {
1879 const int xres = ctx->dpyAttr[mDpy].xres;
Saurabh Shah07a8ca82013-08-06 18:45:42 -07001880 const int lSplit = getLeftSplit(ctx, mDpy);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001881
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001882 hwc_rect_t dst = layer->displayFrame;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001883 pipe_info.lIndex = ovutils::OV_INVALID;
1884 pipe_info.rIndex = ovutils::OV_INVALID;
1885
1886 if (dst.left < lSplit) {
1887 pipe_info.lIndex = getMdpPipe(ctx, type, Overlay::MIXER_LEFT);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001888 if(pipe_info.lIndex == ovutils::OV_INVALID)
1889 return false;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001890 }
1891
1892 if(dst.right > lSplit) {
1893 pipe_info.rIndex = getMdpPipe(ctx, type, Overlay::MIXER_RIGHT);
1894 if(pipe_info.rIndex == ovutils::OV_INVALID)
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001895 return false;
1896 }
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001897
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001898 return true;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001899}
1900
Saurabh Shah88e4d272013-09-03 13:31:29 -07001901bool MDPCompSplit::allocLayerPipes(hwc_context_t *ctx,
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001902 hwc_display_contents_1_t* list) {
1903 for(int index = 0 ; index < mCurrentFrame.layerCount; index++) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001904
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001905 if(mCurrentFrame.isFBComposed[index]) continue;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001906
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001907 hwc_layer_1_t* layer = &list->hwLayers[index];
1908 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301909 hwc_rect_t dst = layer->displayFrame;
1910 const int lSplit = getLeftSplit(ctx, mDpy);
1911 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1912 if((dst.left > lSplit)||(dst.right < lSplit)){
1913 if(allocSplitVGPipesfor4k2k(ctx, list, index)){
1914 continue;
1915 }
1916 }
1917 }
Saurabh Shah0d65dbe2013-06-06 18:33:16 -07001918 int mdpIndex = mCurrentFrame.layerToMDP[index];
1919 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
Saurabh Shah88e4d272013-09-03 13:31:29 -07001920 info.pipeInfo = new MdpPipeInfoSplit;
Saurabh Shah9e3adb22013-03-26 11:16:27 -07001921 info.rot = NULL;
Saurabh Shah88e4d272013-09-03 13:31:29 -07001922 MdpPipeInfoSplit& pipe_info = *(MdpPipeInfoSplit*)info.pipeInfo;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001923 ePipeType type = MDPCOMP_OV_ANY;
1924
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001925 if(isYuvBuffer(hnd)) {
1926 type = MDPCOMP_OV_VG;
Sushil Chauhan15a2ea62013-09-04 18:28:36 -07001927 } else if(!qhwc::needsScalingWithSplit(ctx, layer, mDpy)
Saurabh Shah85234ec2013-04-12 17:09:00 -07001928 && Overlay::getDMAMode() != Overlay::DMA_BLOCK_MODE
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001929 && ctx->mMDP.version >= qdutils::MDSS_V5) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001930 type = MDPCOMP_OV_DMA;
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001931 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001932
1933 if(!acquireMDPPipes(ctx, layer, pipe_info, type)) {
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001934 ALOGD_IF(isDebug(), "%s: Unable to get pipe for type = %d",
1935 __FUNCTION__, (int) type);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001936 return false;
1937 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001938 }
1939 return true;
1940}
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001941
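/*
 * If the 4kx2k layer lies entirely on one half of the panel, it is
 * source-split across two pipes of that mixer; otherwise the regular
 * left/right split configure() is used.
 */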
radhakrishnac9a67412013-09-25 17:40:42 +05301942int MDPCompSplit::configure4k2kYuv(hwc_context_t *ctx, hwc_layer_1_t *layer,
1943 PipeLayerPair& PipeLayerPair) {
1944 const int lSplit = getLeftSplit(ctx, mDpy);
1945 hwc_rect_t dst = layer->displayFrame;
1946 if((dst.left > lSplit)||(dst.right < lSplit)){
1947 MdpYUVPipeInfo& mdp_info =
1948 *(static_cast<MdpYUVPipeInfo*>(PipeLayerPair.pipeInfo));
1949 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1950 eIsFg isFg = IS_FG_OFF;
1951 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1952 eDest lDest = mdp_info.lIndex;
1953 eDest rDest = mdp_info.rIndex;
1954
1955 return configureSourceSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg,
1956 lDest, rDest, &PipeLayerPair.rot);
1957 }
1958 else{
1959 return configure(ctx, layer, PipeLayerPair);
1960 }
1961}
1962
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001963/*
1964 * Configures pipe(s) for MDP composition
1965 */
Saurabh Shah88e4d272013-09-03 13:31:29 -07001966int MDPCompSplit::configure(hwc_context_t *ctx, hwc_layer_1_t *layer,
Saurabh Shah67a38c32013-06-10 16:23:15 -07001967 PipeLayerPair& PipeLayerPair) {
Saurabh Shah88e4d272013-09-03 13:31:29 -07001968 MdpPipeInfoSplit& mdp_info =
1969 *(static_cast<MdpPipeInfoSplit*>(PipeLayerPair.pipeInfo));
Saurabh Shahacf10202013-02-26 10:15:15 -08001970 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1971 eIsFg isFg = IS_FG_OFF;
1972 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1973 eDest lDest = mdp_info.lIndex;
1974 eDest rDest = mdp_info.rIndex;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001975
1976 ALOGD_IF(isDebug(),"%s: configuring: layer: %p z_order: %d dest_pipeL: %d"
1977             " dest_pipeR: %d",__FUNCTION__, layer, zOrder, lDest, rDest);
1978
Saurabh Shah88e4d272013-09-03 13:31:29 -07001979 return configureSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg, lDest,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001980 rDest, &PipeLayerPair.rot);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001981}
1982
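/*
 * Queues buffers to the left and right mixer pipes programmed during
 * prepare, applying assertive display (AD) and rotator output when active.
 */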
Saurabh Shah88e4d272013-09-03 13:31:29 -07001983bool MDPCompSplit::draw(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001984
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001985 if(!isEnabled()) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001986 ALOGD_IF(isDebug(),"%s: MDP Comp not configured", __FUNCTION__);
1987 return true;
1988 }
1989
1990 if(!ctx || !list) {
1991        ALOGE("%s: invalid context or list",__FUNCTION__);
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001992 return false;
1993 }
1994
Prabhanjan Kandula08222fc2013-07-10 17:20:59 +05301995 if(ctx->listStats[mDpy].numAppLayers > MAX_NUM_APP_LAYERS) {
1996 ALOGD_IF(isDebug(),"%s: Exceeding max layer count", __FUNCTION__);
1997 return true;
1998 }
1999
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002000 /* reset Invalidator */
Saurabh Shah2d998a92013-05-14 17:55:58 -07002001 if(idleInvalidator && !sIdleFallBack && mCurrentFrame.mdpCount)
Saurabh Shahc2f04cf2014-01-23 18:39:01 -08002002 idleInvalidator->handleUpdateEvent();
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002003
Naseer Ahmed54821fe2012-11-28 18:44:38 -05002004 overlay::Overlay& ov = *ctx->mOverlay;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002005 LayerProp *layerProp = ctx->layerProp[mDpy];
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002006
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002007 int numHwLayers = ctx->listStats[mDpy].numAppLayers;
2008 for(int i = 0; i < numHwLayers && mCurrentFrame.mdpCount; i++ )
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002009 {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002010 if(mCurrentFrame.isFBComposed[i]) continue;
2011
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002012 hwc_layer_1_t *layer = &list->hwLayers[i];
Saurabh Shahacf10202013-02-26 10:15:15 -08002013 private_handle_t *hnd = (private_handle_t *)layer->handle;
2014 if(!hnd) {
2015 ALOGE("%s handle null", __FUNCTION__);
2016 return false;
2017 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002018
2019 if(!(layerProp[i].mFlags & HWC_MDPCOMP)) {
2020 continue;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002021 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002022
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002023 int mdpIndex = mCurrentFrame.layerToMDP[i];
2024
radhakrishnac9a67412013-09-25 17:40:42 +05302025 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit)
2026 {
2027 MdpYUVPipeInfo& pipe_info =
2028 *(MdpYUVPipeInfo*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
2029 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
2030 ovutils::eDest indexL = pipe_info.lIndex;
2031 ovutils::eDest indexR = pipe_info.rIndex;
2032 int fd = hnd->fd;
2033 uint32_t offset = hnd->offset;
2034 if(rot) {
2035 rot->queueBuffer(fd, offset);
2036 fd = rot->getDstMemId();
2037 offset = rot->getDstOffset();
2038 }
2039 if(indexL != ovutils::OV_INVALID) {
2040 ovutils::eDest destL = (ovutils::eDest)indexL;
2041 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
2042 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
2043 if (!ov.queueBuffer(fd, offset, destL)) {
2044 ALOGE("%s: queueBuffer failed for display:%d",
2045 __FUNCTION__, mDpy);
2046 return false;
2047 }
2048 }
Saurabh Shahacf10202013-02-26 10:15:15 -08002049
radhakrishnac9a67412013-09-25 17:40:42 +05302050 if(indexR != ovutils::OV_INVALID) {
2051 ovutils::eDest destR = (ovutils::eDest)indexR;
2052 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
2053 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
2054 if (!ov.queueBuffer(fd, offset, destR)) {
2055 ALOGE("%s: queueBuffer failed for display:%d",
2056 __FUNCTION__, mDpy);
2057 return false;
2058 }
Saurabh Shaha9da08f2013-07-03 13:27:53 -07002059 }
2060 }
radhakrishnac9a67412013-09-25 17:40:42 +05302061 else{
2062 MdpPipeInfoSplit& pipe_info =
2063 *(MdpPipeInfoSplit*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
2064 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
Saurabh Shaha9da08f2013-07-03 13:27:53 -07002065
radhakrishnac9a67412013-09-25 17:40:42 +05302066 ovutils::eDest indexL = pipe_info.lIndex;
2067 ovutils::eDest indexR = pipe_info.rIndex;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002068
radhakrishnac9a67412013-09-25 17:40:42 +05302069 int fd = hnd->fd;
2070 int offset = hnd->offset;
2071
2072 if(ctx->mAD->isModeOn()) {
2073 if(ctx->mAD->draw(ctx, fd, offset)) {
2074 fd = ctx->mAD->getDstFd(ctx);
2075 offset = ctx->mAD->getDstOffset(ctx);
2076 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002077 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002078
radhakrishnac9a67412013-09-25 17:40:42 +05302079 if(rot) {
2080 rot->queueBuffer(fd, offset);
2081 fd = rot->getDstMemId();
2082 offset = rot->getDstOffset();
2083 }
2084
2085 //************* play left mixer **********
2086 if(indexL != ovutils::OV_INVALID) {
2087 ovutils::eDest destL = (ovutils::eDest)indexL;
2088 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
2089 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
2090 if (!ov.queueBuffer(fd, offset, destL)) {
2091 ALOGE("%s: queueBuffer failed for left mixer",
2092 __FUNCTION__);
2093 return false;
2094 }
2095 }
2096
2097 //************* play right mixer **********
2098 if(indexR != ovutils::OV_INVALID) {
2099 ovutils::eDest destR = (ovutils::eDest)indexR;
2100 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
2101 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
2102 if (!ov.queueBuffer(fd, offset, destR)) {
2103 ALOGE("%s: queueBuffer failed for right mixer",
2104 __FUNCTION__);
2105 return false;
2106 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002107 }
2108 }
Saurabh Shahacf10202013-02-26 10:15:15 -08002109
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002110 layerProp[i].mFlags &= ~HWC_MDPCOMP;
2111 }
Saurabh Shahacf10202013-02-26 10:15:15 -08002112
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002113 return true;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002114}
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002115}; //namespace
2116