blob: c773c915abaf79b3a5624551fb486570d55de418 [file] [log] [blame]
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001/*
Arun Kumar K.R2aa44c62014-01-21 23:08:28 -08002 * Copyright (C) 2012-2014, The Linux Foundation. All rights reserved.
Naseer Ahmed7c958d42012-07-31 18:57:03 -07003 * Not a Contribution, Apache license notifications and license are retained
4 * for attribution purposes only.
5 *
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 */
18
Saurabh Shah4fdde762013-04-30 18:47:33 -070019#include <math.h>
Naseer Ahmed7c958d42012-07-31 18:57:03 -070020#include "hwc_mdpcomp.h"
Naseer Ahmed54821fe2012-11-28 18:44:38 -050021#include <sys/ioctl.h>
Saurabh Shah56f610d2012-08-07 15:27:06 -070022#include "external.h"
Jeykumar Sankaran27dee262013-08-01 17:09:54 -070023#include "virtual.h"
Ramkumar Radhakrishnan47573e22012-11-07 11:36:41 -080024#include "qdMetaData.h"
Ramkumar Radhakrishnan288f8c72013-01-15 11:37:54 -080025#include "mdp_version.h"
Saurabh Shah2a4eb1b2013-07-22 16:33:23 -070026#include "hwc_fbupdate.h"
Saurabh Shaha9da08f2013-07-03 13:27:53 -070027#include "hwc_ad.h"
Saurabh Shahacf10202013-02-26 10:15:15 -080028#include <overlayRotator.h>
29
Saurabh Shah85234ec2013-04-12 17:09:00 -070030using namespace overlay;
Saurabh Shahbd2d0832013-04-04 14:33:08 -070031using namespace qdutils;
Saurabh Shahacf10202013-02-26 10:15:15 -080032using namespace overlay::utils;
33namespace ovutils = overlay::utils;
Naseer Ahmed7c958d42012-07-31 18:57:03 -070034
Naseer Ahmed7c958d42012-07-31 18:57:03 -070035namespace qhwc {
36
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -080037//==============MDPComp========================================================
38
Naseer Ahmed7c958d42012-07-31 18:57:03 -070039IdleInvalidator *MDPComp::idleInvalidator = NULL;
40bool MDPComp::sIdleFallBack = false;
Ramkumar Radhakrishnan92abb4f2014-02-06 21:31:29 -080041bool MDPComp::sHandleTimeout = false;
Naseer Ahmed7c958d42012-07-31 18:57:03 -070042bool MDPComp::sDebugLogs = false;
Naseer Ahmed54821fe2012-11-28 18:44:38 -050043bool MDPComp::sEnabled = false;
Jeykumar Sankaran24c199d2013-05-24 09:40:36 -070044bool MDPComp::sEnableMixedMode = true;
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -070045bool MDPComp::sEnablePartialFrameUpdate = false;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -080046int MDPComp::sMaxPipesPerMixer = MAX_PIPES_PER_MIXER;
Saurabh Shahf5f2b132013-11-25 12:08:35 -080047double MDPComp::sMaxBw = 0.0;
Saurabh Shah3c1a6b02013-11-22 11:10:20 -080048double MDPComp::sBwClaimed = 0.0;
radhakrishnac9a67412013-09-25 17:40:42 +053049bool MDPComp::sEnable4k2kYUVSplit = false;
Naseer Ahmed7c958d42012-07-31 18:57:03 -070050
Saurabh Shah88e4d272013-09-03 13:31:29 -070051MDPComp* MDPComp::getObject(hwc_context_t *ctx, const int& dpy) {
52 if(isDisplaySplit(ctx, dpy)) {
53 return new MDPCompSplit(dpy);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -080054 }
Saurabh Shah88e4d272013-09-03 13:31:29 -070055 return new MDPCompNonSplit(dpy);
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -080056}
57
Jeykumar Sankaran85977e32013-02-25 17:06:08 -080058MDPComp::MDPComp(int dpy):mDpy(dpy){};
59
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -080060void MDPComp::dump(android::String8& buf)
61{
Jeykumar Sankaran3c6bb042013-08-15 14:01:04 -070062 if(mCurrentFrame.layerCount > MAX_NUM_APP_LAYERS)
63 return;
64
Jeykumar Sankaran85977e32013-02-25 17:06:08 -080065 dumpsys_log(buf,"HWC Map for Dpy: %s \n",
Jeykumar Sankaran27dee262013-08-01 17:09:54 -070066 (mDpy == 0) ? "\"PRIMARY\"" :
67 (mDpy == 1) ? "\"EXTERNAL\"" : "\"VIRTUAL\"");
Saurabh Shahe9bc60f2013-08-29 12:58:06 -070068 dumpsys_log(buf,"CURR_FRAME: layerCount:%2d mdpCount:%2d "
69 "fbCount:%2d \n", mCurrentFrame.layerCount,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -080070 mCurrentFrame.mdpCount, mCurrentFrame.fbCount);
71 dumpsys_log(buf,"needsFBRedraw:%3s pipesUsed:%2d MaxPipesPerMixer: %d \n",
72 (mCurrentFrame.needsRedraw? "YES" : "NO"),
73 mCurrentFrame.mdpCount, sMaxPipesPerMixer);
74 dumpsys_log(buf," --------------------------------------------- \n");
75 dumpsys_log(buf," listIdx | cached? | mdpIndex | comptype | Z \n");
76 dumpsys_log(buf," --------------------------------------------- \n");
77 for(int index = 0; index < mCurrentFrame.layerCount; index++ )
78 dumpsys_log(buf," %7d | %7s | %8d | %9s | %2d \n",
79 index,
80 (mCurrentFrame.isFBComposed[index] ? "YES" : "NO"),
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -070081 mCurrentFrame.layerToMDP[index],
Jeykumar Sankaran85977e32013-02-25 17:06:08 -080082 (mCurrentFrame.isFBComposed[index] ?
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -070083 (mCurrentFrame.drop[index] ? "DROP" :
84 (mCurrentFrame.needsRedraw ? "GLES" : "CACHE")) : "MDP"),
Jeykumar Sankaran85977e32013-02-25 17:06:08 -080085 (mCurrentFrame.isFBComposed[index] ? mCurrentFrame.fbZ :
86 mCurrentFrame.mdpToLayer[mCurrentFrame.layerToMDP[index]].pipeInfo->zOrder));
87 dumpsys_log(buf,"\n");
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -080088}
89
90bool MDPComp::init(hwc_context_t *ctx) {
91
92 if(!ctx) {
93 ALOGE("%s: Invalid hwc context!!",__FUNCTION__);
94 return false;
95 }
96
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -080097 char property[PROPERTY_VALUE_MAX];
98
99 sEnabled = false;
100 if((property_get("persist.hwc.mdpcomp.enable", property, NULL) > 0) &&
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800101 (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
102 (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800103 sEnabled = true;
104 }
105
Jeykumar Sankaran24c199d2013-05-24 09:40:36 -0700106 sEnableMixedMode = true;
107 if((property_get("debug.mdpcomp.mixedmode.disable", property, NULL) > 0) &&
108 (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
109 (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
110 sEnableMixedMode = false;
111 }
112
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800113 if(property_get("debug.mdpcomp.logs", property, NULL) > 0) {
114 if(atoi(property) != 0)
115 sDebugLogs = true;
116 }
117
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700118 if(property_get("persist.hwc.partialupdate.enable", property, NULL) > 0) {
119 if((atoi(property) != 0) && ctx->mMDP.panel == MIPI_CMD_PANEL &&
120 qdutils::MDPVersion::getInstance().is8x74v2())
121 sEnablePartialFrameUpdate = true;
122 }
123 ALOGE_IF(isDebug(), "%s: Partial Update applicable?: %d",__FUNCTION__,
124 sEnablePartialFrameUpdate);
125
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800126 sMaxPipesPerMixer = MAX_PIPES_PER_MIXER;
Saurabh Shah85234ec2013-04-12 17:09:00 -0700127 if(property_get("debug.mdpcomp.maxpermixer", property, "-1") > 0) {
128 int val = atoi(property);
129 if(val >= 0)
130 sMaxPipesPerMixer = min(val, MAX_PIPES_PER_MIXER);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800131 }
132
Naseer Ahmedf40f2c82013-08-14 16:42:40 -0400133 if(ctx->mMDP.panel != MIPI_CMD_PANEL) {
134 // Idle invalidation is not necessary on command mode panels
135 long idle_timeout = DEFAULT_IDLE_TIME;
136 if(property_get("debug.mdpcomp.idletime", property, NULL) > 0) {
137 if(atoi(property) != 0)
138 idle_timeout = atoi(property);
139 }
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800140
Naseer Ahmedf40f2c82013-08-14 16:42:40 -0400141 //create Idle Invalidator only when not disabled through property
142 if(idle_timeout != -1)
143 idleInvalidator = IdleInvalidator::getInstance();
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800144
Naseer Ahmedf40f2c82013-08-14 16:42:40 -0400145 if(idleInvalidator == NULL) {
146 ALOGE("%s: failed to instantiate idleInvalidator object",
147 __FUNCTION__);
148 } else {
149 idleInvalidator->init(timeout_handler, ctx, idle_timeout);
150 }
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800151 }
radhakrishnac9a67412013-09-25 17:40:42 +0530152
153 if((property_get("debug.mdpcomp.4k2kSplit", property, "0") > 0) &&
154 (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
155 (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
156 sEnable4k2kYUVSplit = true;
157 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -0700158 return true;
159}
160
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800161void MDPComp::reset(hwc_context_t *ctx) {
162 const int numLayers = ctx->listStats[mDpy].numAppLayers;
Saurabh Shah2a4eb1b2013-07-22 16:33:23 -0700163 mCurrentFrame.reset(numLayers);
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800164 ctx->mOverlay->clear(mDpy);
165 ctx->mLayerRotMap[mDpy]->clear();
Saurabh Shah2a4eb1b2013-07-22 16:33:23 -0700166}
167
Naseer Ahmed7c958d42012-07-31 18:57:03 -0700168void MDPComp::timeout_handler(void *udata) {
169 struct hwc_context_t* ctx = (struct hwc_context_t*)(udata);
170
171 if(!ctx) {
172 ALOGE("%s: received empty data in timer callback", __FUNCTION__);
173 return;
174 }
Ramkumar Radhakrishnan92abb4f2014-02-06 21:31:29 -0800175 Locker::Autolock _l(ctx->mDrawLock);
176 // Handle timeout event only if the previous composition is MDP or MIXED.
177 if(!sHandleTimeout) {
178 ALOGD_IF(isDebug(), "%s:Do not handle this timeout", __FUNCTION__);
179 return;
180 }
Jesse Hall3be78d92012-08-21 15:12:23 -0700181 if(!ctx->proc) {
Naseer Ahmed7c958d42012-07-31 18:57:03 -0700182 ALOGE("%s: HWC proc not registered", __FUNCTION__);
183 return;
184 }
185 sIdleFallBack = true;
186 /* Trigger SF to redraw the current frame */
Jesse Hall3be78d92012-08-21 15:12:23 -0700187 ctx->proc->invalidate(ctx->proc);
Naseer Ahmed7c958d42012-07-31 18:57:03 -0700188}
189
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800190void MDPComp::setMDPCompLayerFlags(hwc_context_t *ctx,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800191 hwc_display_contents_1_t* list) {
192 LayerProp *layerProp = ctx->layerProp[mDpy];
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800193
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800194 for(int index = 0; index < ctx->listStats[mDpy].numAppLayers; index++) {
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800195 hwc_layer_1_t* layer = &(list->hwLayers[index]);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800196 if(!mCurrentFrame.isFBComposed[index]) {
197 layerProp[index].mFlags |= HWC_MDPCOMP;
198 layer->compositionType = HWC_OVERLAY;
199 layer->hints |= HWC_HINT_CLEAR_FB;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800200 } else {
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700201 /* Drop the layer when its already present in FB OR when it lies
202 * outside frame's ROI */
203 if(!mCurrentFrame.needsRedraw || mCurrentFrame.drop[index]) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800204 layer->compositionType = HWC_OVERLAY;
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700205 }
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800206 }
207 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -0700208}
Naseer Ahmed54821fe2012-11-28 18:44:38 -0500209
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800210void MDPComp::setRedraw(hwc_context_t *ctx,
211 hwc_display_contents_1_t* list) {
212 mCurrentFrame.needsRedraw = false;
213 if(!mCachedFrame.isSameFrame(mCurrentFrame, list) ||
214 (list->flags & HWC_GEOMETRY_CHANGED) ||
215 isSkipPresent(ctx, mDpy)) {
216 mCurrentFrame.needsRedraw = true;
217 }
218}
219
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800220MDPComp::FrameInfo::FrameInfo() {
Saurabh Shahaa236822013-04-24 18:07:26 -0700221 reset(0);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800222}
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800223
Saurabh Shahaa236822013-04-24 18:07:26 -0700224void MDPComp::FrameInfo::reset(const int& numLayers) {
225 for(int i = 0 ; i < MAX_PIPES_PER_MIXER && numLayers; i++ ) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800226 if(mdpToLayer[i].pipeInfo) {
227 delete mdpToLayer[i].pipeInfo;
228 mdpToLayer[i].pipeInfo = NULL;
229 //We dont own the rotator
230 mdpToLayer[i].rot = NULL;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800231 }
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800232 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800233
234 memset(&mdpToLayer, 0, sizeof(mdpToLayer));
235 memset(&layerToMDP, -1, sizeof(layerToMDP));
Saurabh Shahaa236822013-04-24 18:07:26 -0700236 memset(&isFBComposed, 1, sizeof(isFBComposed));
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800237
Saurabh Shahaa236822013-04-24 18:07:26 -0700238 layerCount = numLayers;
239 fbCount = numLayers;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800240 mdpCount = 0;
Saurabh Shah2f3895f2013-05-02 10:13:31 -0700241 needsRedraw = true;
Saurabh Shahd53bc5f2014-02-05 10:17:43 -0800242 fbZ = -1;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800243}
244
Saurabh Shahaa236822013-04-24 18:07:26 -0700245void MDPComp::FrameInfo::map() {
246 // populate layer and MDP maps
247 int mdpIdx = 0;
248 for(int idx = 0; idx < layerCount; idx++) {
249 if(!isFBComposed[idx]) {
250 mdpToLayer[mdpIdx].listIndex = idx;
251 layerToMDP[idx] = mdpIdx++;
252 }
253 }
254}
255
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800256MDPComp::LayerCache::LayerCache() {
257 reset();
258}
259
260void MDPComp::LayerCache::reset() {
Saurabh Shahaa236822013-04-24 18:07:26 -0700261 memset(&hnd, 0, sizeof(hnd));
Prabhanjan Kandula2243aa62013-10-24 12:58:55 +0530262 memset(&isFBComposed, true, sizeof(isFBComposed));
263 memset(&drop, false, sizeof(drop));
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800264 layerCount = 0;
Saurabh Shahaa236822013-04-24 18:07:26 -0700265}
266
267void MDPComp::LayerCache::cacheAll(hwc_display_contents_1_t* list) {
268 const int numAppLayers = list->numHwLayers - 1;
269 for(int i = 0; i < numAppLayers; i++) {
270 hnd[i] = list->hwLayers[i].handle;
271 }
272}
273
274void MDPComp::LayerCache::updateCounts(const FrameInfo& curFrame) {
Saurabh Shahaa236822013-04-24 18:07:26 -0700275 layerCount = curFrame.layerCount;
Prabhanjan Kandula2243aa62013-10-24 12:58:55 +0530276 memcpy(&isFBComposed, &curFrame.isFBComposed, sizeof(isFBComposed));
277 memcpy(&drop, &curFrame.drop, sizeof(drop));
278}
279
Jeykumar Sankaran988d3682013-11-15 11:57:16 -0800280bool MDPComp::LayerCache::isSameFrame(const FrameInfo& curFrame,
281 hwc_display_contents_1_t* list) {
Prabhanjan Kandula2243aa62013-10-24 12:58:55 +0530282 if(layerCount != curFrame.layerCount)
283 return false;
284 for(int i = 0; i < curFrame.layerCount; i++) {
285 if((curFrame.isFBComposed[i] != isFBComposed[i]) ||
286 (curFrame.drop[i] != drop[i])) {
287 return false;
288 }
Jeykumar Sankaran988d3682013-11-15 11:57:16 -0800289 if(curFrame.isFBComposed[i] &&
290 (hnd[i] != list->hwLayers[i].handle)){
291 return false;
292 }
Prabhanjan Kandula2243aa62013-10-24 12:58:55 +0530293 }
294 return true;
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -0800295}
296
Saurabh Shahe9bc60f2013-08-29 12:58:06 -0700297bool MDPComp::isSupportedForMDPComp(hwc_context_t *ctx, hwc_layer_1_t* layer) {
298 private_handle_t *hnd = (private_handle_t *)layer->handle;
299 if((not isYuvBuffer(hnd) and has90Transform(layer)) or
300 (not isValidDimension(ctx,layer))
301 //More conditions here, SKIP, sRGB+Blend etc
302 ) {
303 return false;
304 }
305 return true;
306}
307
Sravan Kumar D.V.Nad5d9292013-04-24 14:23:04 +0530308bool MDPComp::isValidDimension(hwc_context_t *ctx, hwc_layer_1_t *layer) {
Jeykumar Sankaranc18dbc22013-02-08 14:29:44 -0800309 private_handle_t *hnd = (private_handle_t *)layer->handle;
310
311 if(!hnd) {
Sushil Chauhan897a9c32013-07-18 11:09:55 -0700312 if (layer->flags & HWC_COLOR_FILL) {
313 // Color layer
314 return true;
315 }
Jeykumar Sankaranc18dbc22013-02-08 14:29:44 -0800316 ALOGE("%s: layer handle is NULL", __FUNCTION__);
317 return false;
318 }
319
Naseer Ahmede850a802013-09-06 13:12:52 -0400320 //XXX: Investigate doing this with pixel phase on MDSS
Naseer Ahmede77f8082013-10-10 13:42:48 -0400321 if(!isSecureBuffer(hnd) && isNonIntegralSourceCrop(layer->sourceCropf))
Naseer Ahmede850a802013-09-06 13:12:52 -0400322 return false;
323
Saurabh Shah62e1d732013-09-17 10:44:05 -0700324 hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
Saurabh Shah4fdde762013-04-30 18:47:33 -0700325 hwc_rect_t dst = layer->displayFrame;
Saurabh Shah4fdde762013-04-30 18:47:33 -0700326 int crop_w = crop.right - crop.left;
327 int crop_h = crop.bottom - crop.top;
328 int dst_w = dst.right - dst.left;
329 int dst_h = dst.bottom - dst.top;
Jeykumar Sankaran6cd8e7e2014-01-13 16:01:05 -0800330 float w_scale = ((float)crop_w / (float)dst_w);
331 float h_scale = ((float)crop_h / (float)dst_h);
Saurabh Shah4fdde762013-04-30 18:47:33 -0700332
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800333 /* Workaround for MDP HW limitation in DSI command mode panels where
334 * FPS will not go beyond 30 if buffers on RGB pipes are of width or height
335 * less than 5 pixels
Sravan Kumar D.V.Nad5d9292013-04-24 14:23:04 +0530336 * There also is a HW limilation in MDP, minimum block size is 2x2
337 * Fallback to GPU if height is less than 2.
338 */
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800339 if((crop_w < 5)||(crop_h < 5))
Jeykumar Sankaranc18dbc22013-02-08 14:29:44 -0800340 return false;
341
Jeykumar Sankaran6cd8e7e2014-01-13 16:01:05 -0800342 if((w_scale > 1.0f) || (h_scale > 1.0f)) {
Jeykumar Sankaran1706a772013-11-27 12:55:19 -0800343 const uint32_t downscale =
Saurabh Shah4fdde762013-04-30 18:47:33 -0700344 qdutils::MDPVersion::getInstance().getMaxMDPDownscale();
Jeykumar Sankaran6cd8e7e2014-01-13 16:01:05 -0800345 const float w_dscale = w_scale;
346 const float h_dscale = h_scale;
347
Jeykumar Sankaran1706a772013-11-27 12:55:19 -0800348 if(ctx->mMDP.version >= qdutils::MDSS_V5) {
349 /* Workaround for downscales larger than 4x.
350 * Will be removed once decimator block is enabled for MDSS
351 */
352 if(!qdutils::MDPVersion::getInstance().supportsDecimation()) {
353 if(crop_w > MAX_DISPLAY_DIM || w_dscale > downscale ||
354 h_dscale > downscale)
355 return false;
356 } else {
357 if(w_dscale > 64 || h_dscale > 64)
358 return false;
359 }
360 } else { //A-family
361 if(w_dscale > downscale || h_dscale > downscale)
Saurabh Shah4fdde762013-04-30 18:47:33 -0700362 return false;
363 }
Saurabh Shah4fdde762013-04-30 18:47:33 -0700364 }
365
Jeykumar Sankaran6cd8e7e2014-01-13 16:01:05 -0800366 if((w_scale < 1.0f) || (h_scale < 1.0f)) {
367 const uint32_t upscale =
368 qdutils::MDPVersion::getInstance().getMaxMDPUpscale();
369 const float w_uscale = 1.0f / w_scale;
370 const float h_uscale = 1.0f / h_scale;
371
372 if(w_uscale > upscale || h_uscale > upscale)
373 return false;
374 }
375
Jeykumar Sankaranc18dbc22013-02-08 14:29:44 -0800376 return true;
377}
378
Saurabh Shahaf5f5972013-07-30 13:56:35 -0700379ovutils::eDest MDPComp::getMdpPipe(hwc_context_t *ctx, ePipeType type,
380 int mixer) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800381 overlay::Overlay& ov = *ctx->mOverlay;
382 ovutils::eDest mdp_pipe = ovutils::OV_INVALID;
383
384 switch(type) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800385 case MDPCOMP_OV_DMA:
Saurabh Shahaf5f5972013-07-30 13:56:35 -0700386 mdp_pipe = ov.nextPipe(ovutils::OV_MDP_PIPE_DMA, mDpy, mixer);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800387 if(mdp_pipe != ovutils::OV_INVALID) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800388 return mdp_pipe;
389 }
390 case MDPCOMP_OV_ANY:
391 case MDPCOMP_OV_RGB:
Saurabh Shahaf5f5972013-07-30 13:56:35 -0700392 mdp_pipe = ov.nextPipe(ovutils::OV_MDP_PIPE_RGB, mDpy, mixer);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800393 if(mdp_pipe != ovutils::OV_INVALID) {
394 return mdp_pipe;
395 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800396
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800397 if(type == MDPCOMP_OV_RGB) {
398 //Requested only for RGB pipe
399 break;
400 }
401 case MDPCOMP_OV_VG:
Saurabh Shahaf5f5972013-07-30 13:56:35 -0700402 return ov.nextPipe(ovutils::OV_MDP_PIPE_VG, mDpy, mixer);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800403 default:
404 ALOGE("%s: Invalid pipe type",__FUNCTION__);
405 return ovutils::OV_INVALID;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800406 };
407 return ovutils::OV_INVALID;
408}
409
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800410bool MDPComp::isFrameDoable(hwc_context_t *ctx) {
Saurabh Shahaa236822013-04-24 18:07:26 -0700411 bool ret = true;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800412
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800413 if(!isEnabled()) {
414 ALOGD_IF(isDebug(),"%s: MDP Comp. not enabled.", __FUNCTION__);
Saurabh Shahaa236822013-04-24 18:07:26 -0700415 ret = false;
Saurabh Shahd4e65852013-06-17 11:33:53 -0700416 } else if(qdutils::MDPVersion::getInstance().is8x26() &&
Ramkumar Radhakrishnan8bb48d32013-12-30 23:11:27 -0800417 ctx->mVideoTransFlag &&
418 isSecondaryConnected(ctx)) {
Saurabh Shahd4e65852013-06-17 11:33:53 -0700419 //1 Padding round to shift pipes across mixers
420 ALOGD_IF(isDebug(),"%s: MDP Comp. video transition padding round",
421 __FUNCTION__);
422 ret = false;
Ramkumar Radhakrishnan8bb48d32013-12-30 23:11:27 -0800423 } else if(isSecondaryConfiguring(ctx)) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800424 ALOGD_IF( isDebug(),"%s: External Display connection is pending",
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800425 __FUNCTION__);
Saurabh Shahaa236822013-04-24 18:07:26 -0700426 ret = false;
Saurabh Shahaa236822013-04-24 18:07:26 -0700427 } else if(ctx->isPaddingRound) {
Raj Kamal9ed3d6b2014-02-07 16:15:17 +0530428 ALOGD_IF(isDebug(), "%s: padding round invoked for dpy %d",
429 __FUNCTION__,mDpy);
Saurabh Shahaa236822013-04-24 18:07:26 -0700430 ret = false;
Saurabh Shah0ceeb6a2013-04-23 10:46:07 -0700431 }
Saurabh Shahaa236822013-04-24 18:07:26 -0700432 return ret;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800433}
434
Jeykumar Sankaran862d87c2013-11-08 16:47:26 -0800435/*
436 * 1) Identify layers that are not visible in the updating ROI and drop them
437 * from composition.
438 * 2) If we have a scaling layers which needs cropping against generated ROI.
439 * Reset ROI to full resolution.
440 */
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700441bool MDPComp::validateAndApplyROI(hwc_context_t *ctx,
442 hwc_display_contents_1_t* list, hwc_rect_t roi) {
443 int numAppLayers = ctx->listStats[mDpy].numAppLayers;
444
445 if(!isValidRect(roi))
446 return false;
447
Jeykumar Sankaran862d87c2013-11-08 16:47:26 -0800448 hwc_rect_t visibleRect = roi;
449
450 for(int i = numAppLayers - 1; i >= 0; i--){
451
452 if(!isValidRect(visibleRect)) {
453 mCurrentFrame.drop[i] = true;
454 mCurrentFrame.dropCount++;
Jeykumar Sankaran0b961452014-01-21 17:26:12 -0800455 continue;
Jeykumar Sankaran862d87c2013-11-08 16:47:26 -0800456 }
457
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700458 const hwc_layer_1_t* layer = &list->hwLayers[i];
459
460 hwc_rect_t dstRect = layer->displayFrame;
Arun Kumar K.R91090c72013-10-28 19:40:18 -0700461 hwc_rect_t srcRect = integerizeSourceCrop(layer->sourceCropf);
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700462
Jeykumar Sankaran862d87c2013-11-08 16:47:26 -0800463 hwc_rect_t res = getIntersection(visibleRect, dstRect);
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700464
465 int res_w = res.right - res.left;
466 int res_h = res.bottom - res.top;
467 int dst_w = dstRect.right - dstRect.left;
468 int dst_h = dstRect.bottom - dstRect.top;
469
470 if(!isValidRect(res)) {
471 mCurrentFrame.drop[i] = true;
472 mCurrentFrame.dropCount++;
473 }else {
474 /* Reset frame ROI when any layer which needs scaling also needs ROI
475 * cropping */
Jeykumar Sankaran0b961452014-01-21 17:26:12 -0800476 if((res_w != dst_w || res_h != dst_h) && needsScaling (layer)) {
Arpita Banerjeed8965982013-11-08 17:27:33 -0800477 ALOGI("%s: Resetting ROI due to scaling", __FUNCTION__);
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700478 memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
479 mCurrentFrame.dropCount = 0;
480 return false;
481 }
Jeykumar Sankaran862d87c2013-11-08 16:47:26 -0800482
Jeykumar Sankaran0b961452014-01-21 17:26:12 -0800483 /* deduct any opaque region from visibleRect */
484 if (layer->blending == HWC_BLENDING_NONE)
485 visibleRect = deductRect(visibleRect, res);
486 }
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700487 }
488 return true;
489}
490
491void MDPComp::generateROI(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
492 int numAppLayers = ctx->listStats[mDpy].numAppLayers;
493
494 if(!sEnablePartialFrameUpdate) {
495 return;
496 }
497
498 if(mDpy || isDisplaySplit(ctx, mDpy)){
499 ALOGE_IF(isDebug(), "%s: ROI not supported for"
500 "the (1) external / virtual display's (2) dual DSI displays",
501 __FUNCTION__);
502 return;
503 }
504
Jeykumar Sankaran862d87c2013-11-08 16:47:26 -0800505 if(isSkipPresent(ctx, mDpy))
506 return;
507
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700508 if(list->flags & HWC_GEOMETRY_CHANGED)
509 return;
510
511 struct hwc_rect roi = (struct hwc_rect){0, 0, 0, 0};
512 for(int index = 0; index < numAppLayers; index++ ) {
513 if ((mCachedFrame.hnd[index] != list->hwLayers[index].handle) ||
514 isYuvBuffer((private_handle_t *)list->hwLayers[index].handle)) {
515 hwc_rect_t dstRect = list->hwLayers[index].displayFrame;
Arun Kumar K.R91090c72013-10-28 19:40:18 -0700516 hwc_rect_t srcRect = integerizeSourceCrop(
517 list->hwLayers[index].sourceCropf);
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700518
519 /* Intersect against display boundaries */
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700520 roi = getUnion(roi, dstRect);
521 }
522 }
523
524 if(!validateAndApplyROI(ctx, list, roi)){
525 roi = (struct hwc_rect) {0, 0,
526 (int)ctx->dpyAttr[mDpy].xres, (int)ctx->dpyAttr[mDpy].yres};
527 }
528
529 ctx->listStats[mDpy].roi.x = roi.left;
530 ctx->listStats[mDpy].roi.y = roi.top;
531 ctx->listStats[mDpy].roi.w = roi.right - roi.left;
532 ctx->listStats[mDpy].roi.h = roi.bottom - roi.top;
533
534 ALOGD_IF(isDebug(),"%s: generated ROI: [%d, %d, %d, %d]", __FUNCTION__,
535 roi.left, roi.top, roi.right, roi.bottom);
536}
537
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800538/* Checks for conditions where all the layers marked for MDP comp cannot be
539 * bypassed. On such conditions we try to bypass atleast YUV layers */
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800540bool MDPComp::tryFullFrame(hwc_context_t *ctx,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800541 hwc_display_contents_1_t* list){
542
Saurabh Shahaa236822013-04-24 18:07:26 -0700543 const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
Arun Kumar K.R2e2871c2014-01-10 12:47:06 -0800544 int priDispW = ctx->dpyAttr[HWC_DISPLAY_PRIMARY].xres;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800545
Ramkumar Radhakrishnanba713382013-08-30 18:41:07 -0700546 if(sIdleFallBack && !ctx->listStats[mDpy].secureUI) {
Saurabh Shah2d998a92013-05-14 17:55:58 -0700547 ALOGD_IF(isDebug(), "%s: Idle fallback dpy %d",__FUNCTION__, mDpy);
548 return false;
549 }
550
Jeykumar Sankaran85977e32013-02-25 17:06:08 -0800551 if(isSkipPresent(ctx, mDpy)) {
Saurabh Shahaa236822013-04-24 18:07:26 -0700552 ALOGD_IF(isDebug(),"%s: SKIP present: %d",
553 __FUNCTION__,
554 isSkipPresent(ctx, mDpy));
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800555 return false;
556 }
557
Arun Kumar K.R2e2871c2014-01-10 12:47:06 -0800558 if(mDpy > HWC_DISPLAY_PRIMARY && (priDispW > MAX_DISPLAY_DIM) &&
559 (ctx->dpyAttr[mDpy].xres < MAX_DISPLAY_DIM)) {
560 // Disable MDP comp on Secondary when the primary is highres panel and
561 // the secondary is a normal 1080p, because, MDP comp on secondary under
562 // in such usecase, decimation gets used for downscale and there will be
563 // a quality mismatch when there will be a fallback to GPU comp
564 ALOGD_IF(isDebug(), "%s: Disable MDP Compositon for Secondary Disp",
565 __FUNCTION__);
566 return false;
567 }
568
Ramkumar Radhakrishnan4af1ef02013-12-12 11:53:08 -0800569 // check for action safe flag and downscale mode which requires scaling.
570 if(ctx->dpyAttr[mDpy].mActionSafePresent
571 || ctx->dpyAttr[mDpy].mDownScaleMode) {
572 ALOGD_IF(isDebug(), "%s: Scaling needed for this frame",__FUNCTION__);
573 return false;
574 }
575
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800576 for(int i = 0; i < numAppLayers; ++i) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800577 hwc_layer_1_t* layer = &list->hwLayers[i];
578 private_handle_t *hnd = (private_handle_t *)layer->handle;
Jeykumar Sankarana37fdbf2013-03-06 18:59:28 -0800579
Saurabh Shahe9bc60f2013-08-29 12:58:06 -0700580 if(isYuvBuffer(hnd) && has90Transform(layer)) {
581 if(!canUseRotator(ctx, mDpy)) {
582 ALOGD_IF(isDebug(), "%s: Can't use rotator for dpy %d",
583 __FUNCTION__, mDpy);
Amara Venkata Mastan Manoj Kumar9d373c02013-08-20 14:30:09 -0700584 return false;
585 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800586 }
Prabhanjan Kandula9fb032a2013-06-18 17:37:22 +0530587
588 //For 8x26 with panel width>1k, if RGB layer needs HFLIP fail mdp comp
589 // may not need it if Gfx pre-rotation can handle all flips & rotations
590 if(qdutils::MDPVersion::getInstance().is8x26() &&
591 (ctx->dpyAttr[mDpy].xres > 1024) &&
592 (layer->transform & HWC_TRANSFORM_FLIP_H) &&
593 (!isYuvBuffer(hnd)))
594 return false;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800595 }
Saurabh Shahaa236822013-04-24 18:07:26 -0700596
Saurabh Shaha9da08f2013-07-03 13:27:53 -0700597 if(ctx->mAD->isDoable()) {
598 return false;
599 }
600
Saurabh Shahaa236822013-04-24 18:07:26 -0700601 //If all above hard conditions are met we can do full or partial MDP comp.
602 bool ret = false;
603 if(fullMDPComp(ctx, list)) {
604 ret = true;
Jeykumar Sankaran24c199d2013-05-24 09:40:36 -0700605 } else if(partialMDPComp(ctx, list)) {
Saurabh Shahaa236822013-04-24 18:07:26 -0700606 ret = true;
607 }
Prabhanjan Kandula21918db2013-11-26 15:51:58 +0530608
Saurabh Shahaa236822013-04-24 18:07:26 -0700609 return ret;
610}
611
612bool MDPComp::fullMDPComp(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Saurabh Shahe9bc60f2013-08-29 12:58:06 -0700613 //Will benefit presentation / secondary-only layer.
614 if((mDpy > HWC_DISPLAY_PRIMARY) &&
615 (list->numHwLayers - 1) > MAX_SEC_LAYERS) {
616 ALOGD_IF(isDebug(), "%s: Exceeds max secondary pipes",__FUNCTION__);
617 return false;
618 }
619
620 const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
621 for(int i = 0; i < numAppLayers; i++) {
622 hwc_layer_1_t* layer = &list->hwLayers[i];
623 if(not isSupportedForMDPComp(ctx, layer)) {
624 ALOGD_IF(isDebug(), "%s: Unsupported layer in list",__FUNCTION__);
625 return false;
626 }
Yang Xu9c1eb2b2013-11-26 01:28:13 +0800627
628 //For 8x26, if there is only one layer which needs scale for secondary
629 //while no scale for primary display, DMA pipe is occupied by primary.
630 //If need to fall back to GLES composition, virtual display lacks DMA
631 //pipe and error is reported.
632 if(qdutils::MDPVersion::getInstance().is8x26() &&
633 mDpy >= HWC_DISPLAY_EXTERNAL &&
Prabhanjan Kandula21918db2013-11-26 15:51:58 +0530634 qhwc::needsScaling(layer))
Yang Xu9c1eb2b2013-11-26 01:28:13 +0800635 return false;
Saurabh Shahe9bc60f2013-08-29 12:58:06 -0700636 }
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800637
Saurabh Shahaa236822013-04-24 18:07:26 -0700638 mCurrentFrame.fbCount = 0;
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700639 memcpy(&mCurrentFrame.isFBComposed, &mCurrentFrame.drop,
640 sizeof(mCurrentFrame.isFBComposed));
641 mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount -
642 mCurrentFrame.dropCount;
Saurabh Shahaa236822013-04-24 18:07:26 -0700643
radhakrishnac9a67412013-09-25 17:40:42 +0530644 if(sEnable4k2kYUVSplit){
Saurabh Shah3d4b8042013-12-10 15:19:17 -0800645 adjustForSourceSplit(ctx, list);
radhakrishnac9a67412013-09-25 17:40:42 +0530646 }
647
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800648 if(!postHeuristicsHandling(ctx, list)) {
649 ALOGD_IF(isDebug(), "post heuristic handling failed");
650 reset(ctx);
Saurabh Shah8c5c8522013-08-29 17:32:49 -0700651 return false;
652 }
653
Saurabh Shahaa236822013-04-24 18:07:26 -0700654 return true;
655}
656
657bool MDPComp::partialMDPComp(hwc_context_t *ctx, hwc_display_contents_1_t* list)
658{
Jeykumar Sankaran24c199d2013-05-24 09:40:36 -0700659 if(!sEnableMixedMode) {
660 //Mixed mode is disabled. No need to even try caching.
661 return false;
662 }
663
Saurabh Shah8028e3b2013-10-15 12:27:59 -0700664 bool ret = false;
Saurabh Shahf2de00f2013-12-11 17:52:53 -0800665 if(list->flags & HWC_GEOMETRY_CHANGED) { //Try load based first
666 ret = loadBasedCompPreferGPU(ctx, list) or
667 loadBasedCompPreferMDP(ctx, list) or
668 cacheBasedComp(ctx, list);
669 } else {
670 ret = cacheBasedComp(ctx, list) or
671 loadBasedCompPreferGPU(ctx, list) or
Saurabh Shahb772ae32013-11-18 15:40:02 -0800672 loadBasedCompPreferMDP(ctx, list);
Saurabh Shah8028e3b2013-10-15 12:27:59 -0700673 }
674
Saurabh Shah8028e3b2013-10-15 12:27:59 -0700675 return ret;
676}
677
678bool MDPComp::cacheBasedComp(hwc_context_t *ctx,
679 hwc_display_contents_1_t* list) {
680 int numAppLayers = ctx->listStats[mDpy].numAppLayers;
Saurabh Shahaa236822013-04-24 18:07:26 -0700681 mCurrentFrame.reset(numAppLayers);
682 updateLayerCache(ctx, list);
Saurabh Shahe9bc60f2013-08-29 12:58:06 -0700683
684 //If an MDP marked layer is unsupported cannot do partial MDP Comp
685 for(int i = 0; i < numAppLayers; i++) {
686 if(!mCurrentFrame.isFBComposed[i]) {
687 hwc_layer_1_t* layer = &list->hwLayers[i];
688 if(not isSupportedForMDPComp(ctx, layer)) {
689 ALOGD_IF(isDebug(), "%s: Unsupported layer in list",
690 __FUNCTION__);
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800691 reset(ctx);
Saurabh Shahe9bc60f2013-08-29 12:58:06 -0700692 return false;
693 }
694 }
695 }
696
Saurabh Shah90b7b9b2013-09-12 16:36:08 -0700697 updateYUV(ctx, list, false /*secure only*/);
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +0530698 bool ret = markLayersForCaching(ctx, list); //sets up fbZ also
Saurabh Shahe9bc60f2013-08-29 12:58:06 -0700699 if(!ret) {
700 ALOGD_IF(isDebug(),"%s: batching failed, dpy %d",__FUNCTION__, mDpy);
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800701 reset(ctx);
Saurabh Shahe9bc60f2013-08-29 12:58:06 -0700702 return false;
703 }
Saurabh Shahaa236822013-04-24 18:07:26 -0700704
705 int mdpCount = mCurrentFrame.mdpCount;
Saurabh Shahe9bc60f2013-08-29 12:58:06 -0700706
radhakrishnac9a67412013-09-25 17:40:42 +0530707 if(sEnable4k2kYUVSplit){
Saurabh Shah3d4b8042013-12-10 15:19:17 -0800708 adjustForSourceSplit(ctx, list);
radhakrishnac9a67412013-09-25 17:40:42 +0530709 }
710
Saurabh Shahe9bc60f2013-08-29 12:58:06 -0700711 //Will benefit cases where a video has non-updating background.
712 if((mDpy > HWC_DISPLAY_PRIMARY) and
713 (mdpCount > MAX_SEC_LAYERS)) {
714 ALOGD_IF(isDebug(), "%s: Exceeds max secondary pipes",__FUNCTION__);
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800715 reset(ctx);
Saurabh Shahe9bc60f2013-08-29 12:58:06 -0700716 return false;
717 }
718
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800719 if(!postHeuristicsHandling(ctx, list)) {
720 ALOGD_IF(isDebug(), "post heuristic handling failed");
721 reset(ctx);
Saurabh Shah8c5c8522013-08-29 17:32:49 -0700722 return false;
723 }
724
Saurabh Shahaa236822013-04-24 18:07:26 -0700725 return true;
726}
727
/* Load based composition, GPU preferred: finds the contiguous batch of
 * batchSize non-dropped layers with the lowest pixel count and leaves it
 * on the GPU (FB target), so MDP handles the remaining, heavier layers.
 * batchSize is derived from the available mixer stages (one reserved for
 * the FB). Returns false when load based comp is not applicable, no batch
 * fits, a remaining layer is MDP-unsupported, or post-heuristics fails. */
bool MDPComp::loadBasedCompPreferGPU(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    if(not isLoadBasedCompDoable(ctx)) {
        return false;
    }

    int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numAppLayers);

    int stagesForMDP = min(sMaxPipesPerMixer, ctx->mOverlay->availablePipes(
            mDpy, Overlay::MIXER_DEFAULT));
    //If MDP has X possible stages, it can take X layers.
    const int batchSize = (numAppLayers - mCurrentFrame.dropCount) -
            (stagesForMDP - 1); //1 for FB

    if(batchSize <= 0) {
        ALOGD_IF(isDebug(), "%s: Not attempting", __FUNCTION__);
        return false;
    }

    int minBatchStart = -1;
    int minBatchEnd = -1;
    size_t minBatchPixelCount = SIZE_MAX;

    /* Iterate through the layer list to find a contiguous batch of
     * batchSize non-dropped layers with the lowest total pixel count */
    for(int i = 0; i <= (numAppLayers - batchSize); i++) {
        if(mCurrentFrame.drop[i])
            continue;

        int batchCount = batchSize;
        uint32_t batchPixelCount = 0;
        int j = i;
        for(; j < numAppLayers && batchCount; j++){
            if(!mCurrentFrame.drop[j]) {
                hwc_layer_1_t* layer = &list->hwLayers[j];
                hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
                hwc_rect_t dst = layer->displayFrame;

                /* If we have a valid ROI, count pixels only for the MDP
                 * fetched region of the buffer */
                if((ctx->listStats[mDpy].roi.w != ctx->dpyAttr[mDpy].xres) ||
                        (ctx->listStats[mDpy].roi.h != ctx->dpyAttr[mDpy].yres)) {
                    hwc_rect_t roi;
                    roi.left = ctx->listStats[mDpy].roi.x;
                    roi.top = ctx->listStats[mDpy].roi.y;
                    roi.right = roi.left + ctx->listStats[mDpy].roi.w;
                    roi.bottom = roi.top + ctx->listStats[mDpy].roi.h;

                    /* A valid ROI means no scaling layer is composed, so
                     * only the intersection with the dest rect gives the
                     * actually fetched pixels */
                    crop = getIntersection(roi, dst);
                }

                batchPixelCount += (crop.right - crop.left) *
                        (crop.bottom - crop.top);
                batchCount--;
            }
        }

        /* we dont want to program any batch of size lesser than batchSize */
        if(!batchCount && (batchPixelCount < minBatchPixelCount)) {
            minBatchPixelCount = batchPixelCount;
            minBatchStart = i;
            minBatchEnd = j-1;
        }
    }

    if(minBatchStart < 0) {
        ALOGD_IF(isDebug(), "%s: No batch found batchSize %d numAppLayers %d",
                __FUNCTION__, batchSize, numAppLayers);
        return false;
    }

    /* Non-dropped layers falling outside the selected batch are marked for
     * MDP */
    for(int i = 0; i < numAppLayers; i++) {
        if((i < minBatchStart || i > minBatchEnd) && !mCurrentFrame.drop[i] ) {
            hwc_layer_1_t* layer = &list->hwLayers[i];
            if(not isSupportedForMDPComp(ctx, layer)) {
                ALOGD_IF(isDebug(), "%s: MDP unsupported layer found at %d",
                        __FUNCTION__, i);
                reset(ctx);
                return false;
            }
            mCurrentFrame.isFBComposed[i] = false;
        }
    }

    //NOTE(review): fbZ is the raw list index of the batch start; dropped
    //layers below the batch are not discounted here — confirm downstream
    //(map()/pipe z assignment) accounts for drops.
    mCurrentFrame.fbZ = minBatchStart;
    mCurrentFrame.fbCount = batchSize;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount -
            mCurrentFrame.dropCount;

    ALOGD_IF(isDebug(), "%s: fbZ %d batchSize %d fbStart: %d fbEnd: %d",
            __FUNCTION__, mCurrentFrame.fbZ, batchSize, minBatchStart,
            minBatchEnd);

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}
838
/* Load based composition, MDP preferred: sizes the FB (GPU) batch from the
 * bandwidth still unclaimed this frame (sMaxBw - sBwClaimed), keeps the
 * topmost non-dropped fbBatchSize layers on the FB, and hands the bottom
 * layers to MDP. */
bool MDPComp::loadBasedCompPreferMDP(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    if(not isLoadBasedCompDoable(ctx)) {
        return false;
    }

    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numAppLayers);

    //Full screen is from ib perspective, not actual full screen
    const int bpp = 4;
    double panelRefRate =
            1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;

    double bwLeft = sMaxBw - sBwClaimed;

    //How many full-screen-equivalent fetches the leftover bandwidth allows.
    const int fullScreenLayers = bwLeft * 1000000000 / (ctx->dpyAttr[mDpy].xres
            * ctx->dpyAttr[mDpy].yres * bpp * panelRefRate);

    const int fbBatchSize = (numAppLayers - mCurrentFrame.dropCount)
            - (fullScreenLayers - 1);

    //If batch size is not at least 2, we aren't really preferring MDP, since
    //only 1 layer going to GPU could actually translate into an entire FB
    //needed to be fetched by MDP, thus needing more b/w rather than less.
    if(fbBatchSize < 2 || fbBatchSize > numAppLayers) {
        ALOGD_IF(isDebug(), "%s: Not attempting", __FUNCTION__);
        return false;
    }

    //Find top fbBatchSize non-dropped layers to get your batch.
    //NOTE(review): if drops leave fewer than fbBatchSize non-dropped layers,
    //batchCount never hits zero and fbStart stays -1, making fbZ negative
    //below — confirm this combination cannot reach here.
    int fbStart = -1, fbEnd = -1, batchCount = fbBatchSize;
    for(int i = numAppLayers - 1; i >= 0; i--) {
        if(mCurrentFrame.drop[i])
            continue;

        if(fbEnd < 0)
            fbEnd = i;

        if(!(--batchCount)) {
            fbStart = i;
            break;
        }
    }

    //Bottom layers constitute MDP batch. (The loop bound i < fbStart already
    //guarantees i is outside the FB batch; the inner range test is redundant
    //but kept.)
    for(int i = 0; i < fbStart; i++) {
        if((i < fbStart || i > fbEnd) && !mCurrentFrame.drop[i] ) {
            hwc_layer_1_t* layer = &list->hwLayers[i];
            if(not isSupportedForMDPComp(ctx, layer)) {
                ALOGD_IF(isDebug(), "%s: MDP unsupported layer found at %d",
                        __FUNCTION__, i);
                reset(ctx);
                return false;
            }
            mCurrentFrame.isFBComposed[i] = false;
        }
    }

    mCurrentFrame.fbZ = fbStart;
    mCurrentFrame.fbCount = fbBatchSize;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount
            - mCurrentFrame.dropCount;

    ALOGD_IF(isDebug(), "%s: FB Z %d, app layers %d, non-dropped layers: %d, "
            "MDP Batch Size %d",__FUNCTION__, mCurrentFrame.fbZ, numAppLayers,
            numAppLayers - mCurrentFrame.dropCount, mCurrentFrame.mdpCount);

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}
919
Arun Kumar K.R2aa44c62014-01-21 23:08:28 -0800920bool MDPComp::isLoadBasedCompDoable(hwc_context_t *ctx) {
Prabhanjan Kandula3dbbd882013-12-11 14:43:46 +0530921 if(mDpy or isSecurePresent(ctx, mDpy) or
922 isYuvPresent(ctx, mDpy)) {
Saurabh Shah8028e3b2013-10-15 12:27:59 -0700923 return false;
924 }
925 return true;
926}
927
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800928bool MDPComp::tryVideoOnly(hwc_context_t *ctx,
929 hwc_display_contents_1_t* list) {
930 const bool secureOnly = true;
931 return videoOnlyComp(ctx, list, not secureOnly) or
932 videoOnlyComp(ctx, list, secureOnly);
933}
934
935bool MDPComp::videoOnlyComp(hwc_context_t *ctx,
Saurabh Shah90b7b9b2013-09-12 16:36:08 -0700936 hwc_display_contents_1_t* list, bool secureOnly) {
Saurabh Shahaa236822013-04-24 18:07:26 -0700937 int numAppLayers = ctx->listStats[mDpy].numAppLayers;
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -0700938
Saurabh Shahaa236822013-04-24 18:07:26 -0700939 mCurrentFrame.reset(numAppLayers);
Saurabh Shah90b7b9b2013-09-12 16:36:08 -0700940 updateYUV(ctx, list, secureOnly);
Saurabh Shah4fdde762013-04-30 18:47:33 -0700941 int mdpCount = mCurrentFrame.mdpCount;
Saurabh Shahaa236822013-04-24 18:07:26 -0700942
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800943 if(!isYuvPresent(ctx, mDpy) or (mdpCount == 0)) {
944 reset(ctx);
Saurabh Shahaa236822013-04-24 18:07:26 -0700945 return false;
946 }
947
Jeykumar Sankaranf42f0d82013-11-08 18:09:20 -0800948 /* Bail out if we are processing only secured video layers
949 * and we dont have any */
950 if(!isSecurePresent(ctx, mDpy) && secureOnly){
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800951 reset(ctx);
Jeykumar Sankaranf42f0d82013-11-08 18:09:20 -0800952 return false;
953 }
954
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800955 if(mCurrentFrame.fbCount)
956 mCurrentFrame.fbZ = mCurrentFrame.mdpCount;
Saurabh Shah4fdde762013-04-30 18:47:33 -0700957
Saurabh Shahdf4741d2013-12-12 16:40:28 -0800958 if(sEnable4k2kYUVSplit){
959 adjustForSourceSplit(ctx, list);
960 }
961
962 if(!postHeuristicsHandling(ctx, list)) {
963 ALOGD_IF(isDebug(), "post heuristic handling failed");
964 reset(ctx);
Saurabh Shah8c5c8522013-08-29 17:32:49 -0700965 return false;
966 }
967
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -0800968 return true;
969}
970
/* Checks for conditions where YUV layers cannot be bypassed (i.e. cannot
 * go to an MDP pipe and must be GPU composed). */
bool MDPComp::isYUVDoable(hwc_context_t* ctx, hwc_layer_1_t* layer) {
    //Skip-marked layers must be GPU composed.
    if(isSkipLayer(layer)) {
        ALOGD_IF(isDebug(), "%s: Video marked SKIP dpy %d", __FUNCTION__, mDpy);
        return false;
    }

    //A 90-degree transform needs a rotator; bail if none is available.
    if(layer->transform & HWC_TRANSFORM_ROT_90 && !canUseRotator(ctx,mDpy)) {
        ALOGD_IF(isDebug(), "%s: no free DMA pipe",__FUNCTION__);
        return false;
    }

    //MDP securing in progress (per isSecuring) — bypass is not safe now.
    if(isSecuring(ctx, layer)) {
        ALOGD_IF(isDebug(), "%s: MDP securing is active", __FUNCTION__);
        return false;
    }

    //Buffer dimensions/scale factors must be within MDP limits.
    if(!isValidDimension(ctx, layer)) {
        ALOGD_IF(isDebug(), "%s: Buffer is of invalid width",
            __FUNCTION__);
        return false;
    }

    //MDP cannot apply plane alpha to YUV in the video-only path.
    if(layer->planeAlpha < 0xFF) {
        ALOGD_IF(isDebug(), "%s: Cannot handle YUV layer with plane alpha\
                in video only mode",
                __FUNCTION__);
        return false;
    }

    return true;
}
1003
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301004/* starts at fromIndex and check for each layer to find
1005 * if it it has overlapping with any Updating layer above it in zorder
1006 * till the end of the batch. returns true if it finds any intersection */
1007bool MDPComp::canPushBatchToTop(const hwc_display_contents_1_t* list,
1008 int fromIndex, int toIndex) {
1009 for(int i = fromIndex; i < toIndex; i++) {
1010 if(mCurrentFrame.isFBComposed[i] && !mCurrentFrame.drop[i]) {
1011 if(intersectingUpdatingLayers(list, i+1, toIndex, i)) {
1012 return false;
1013 }
1014 }
1015 }
1016 return true;
1017}
1018
1019/* Checks if given layer at targetLayerIndex has any
1020 * intersection with all the updating layers in beween
1021 * fromIndex and toIndex. Returns true if it finds intersectiion */
1022bool MDPComp::intersectingUpdatingLayers(const hwc_display_contents_1_t* list,
1023 int fromIndex, int toIndex, int targetLayerIndex) {
1024 for(int i = fromIndex; i <= toIndex; i++) {
1025 if(!mCurrentFrame.isFBComposed[i]) {
1026 if(areLayersIntersecting(&list->hwLayers[i],
1027 &list->hwLayers[targetLayerIndex])) {
1028 return true;
1029 }
1030 }
1031 }
1032 return false;
1033}
1034
/* Scans the frame for the largest contiguous batch of cacheable
 * (FB composed, non-dropped) layers, allowing a batch to absorb cached
 * layers that sit above intermediate updating layers when no overlap
 * forbids it (the batch then moves up in z-order). Outputs the winning
 * batch bounds through maxBatchStart/maxBatchEnd/maxBatchCount and
 * returns the FB target's z-order, or -1 if no batch was found. */
int MDPComp::getBatch(hwc_display_contents_1_t* list,
        int& maxBatchStart, int& maxBatchEnd,
        int& maxBatchCount) {
    int i = 0;
    int fbZOrder =-1;
    int droppedLayerCt = 0;
    while (i < mCurrentFrame.layerCount) {
        int batchCount = 0;
        int batchStart = i;
        int batchEnd = i;
        /* Adjust batch Z order with the dropped layers so far */
        int fbZ = batchStart - droppedLayerCt;
        int firstZReverseIndex = -1;
        int updatingLayersAbove = 0;//Updating layer count in middle of batch
        while(i < mCurrentFrame.layerCount) {
            if(!mCurrentFrame.isFBComposed[i]) {
                //Updating (MDP) layer: ends the batch search if the batch
                //has not started yet; otherwise it is counted and the batch
                //may still extend past it.
                if(!batchCount) {
                    i++;
                    break;
                }
                updatingLayersAbove++;
                i++;
                continue;
            } else {
                if(mCurrentFrame.drop[i]) {
                    //Dropped layers are transparent to batching but shift
                    //the FB z-order down.
                    i++;
                    droppedLayerCt++;
                    continue;
                } else if(updatingLayersAbove <= 0) {
                    //Plain cached layer directly extends the batch.
                    batchCount++;
                    batchEnd = i;
                    i++;
                    continue;
                } else { //Layer is FBComposed, not a drop & updatingLayer > 0

                    // We have a valid updating layer already. If layer-i does
                    // not overlap any updating layer between batch-start and
                    // i, then we can add layer i to the batch.
                    if(!intersectingUpdatingLayers(list, batchStart, i-1, i)) {
                        batchCount++;
                        batchEnd = i;
                        i++;
                        continue;
                    } else if(canPushBatchToTop(list, batchStart, i)) {
                        //If all the non-updating layers within this batch
                        //have no intersection with the updating layers above
                        //in z-order, then we can safely move the batch to a
                        //higher z-order. Increment fbZ as it is moving up.
                        if( firstZReverseIndex < 0) {
                            firstZReverseIndex = i;
                        }
                        batchCount++;
                        batchEnd = i;
                        fbZ += updatingLayersAbove;
                        i++;
                        updatingLayersAbove = 0;
                        continue;
                    } else {
                        //Both failed: restart the scan, rewinding to the
                        //first point where the batch was pushed up.
                        if(firstZReverseIndex >= 0) {
                            i = firstZReverseIndex;
                        }
                        break;
                    }
                }
            }
        }
        //Keep the widest batch seen so far.
        if(batchCount > maxBatchCount) {
            maxBatchCount = batchCount;
            maxBatchStart = batchStart;
            maxBatchEnd = batchEnd;
            fbZOrder = fbZ;
        }
    }
    return fbZOrder;
}
1111
/* Selects which cached layers stay on the FB target and pulls the rest
 * out to MDP. Returns false when there are no MDP layers at all (so other
 * strategies should be tried) or when a layer being pulled out to MDP is
 * unsupported. On success, fbZ/fbCount/mdpCount reflect the chosen batch. */
bool MDPComp::markLayersForCaching(hwc_context_t* ctx,
        hwc_display_contents_1_t* list) {
    /* Idea is to keep as many non-updating(cached) layers in FB and
     * send rest of them through MDP. This is done in 2 steps.
     *   1. Find the maximum contiguous batch of non-updating layers.
     *   2. See if we can improve this batch size for caching by adding
     *      opaque layers around the batch, if they don't have
     *      any overlapping with the updating layers in between.
     * NEVER mark an updating layer for caching.
     * But cached ones can be marked for MDP */

    int maxBatchStart = -1;
    int maxBatchEnd = -1;
    int maxBatchCount = 0;
    int fbZ = -1;

    /* Nothing is cached. No batching needed */
    if(mCurrentFrame.fbCount == 0) {
        return true;
    }

    /* No MDP comp layers, try to use other comp modes */
    if(mCurrentFrame.mdpCount == 0) {
        return false;
    }

    fbZ = getBatch(list, maxBatchStart, maxBatchEnd, maxBatchCount);

    /* Cached layers outside the chosen batch are pulled out to MDP */
    for(int i = 0; i < mCurrentFrame.layerCount; i++) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        if((i < maxBatchStart || i > maxBatchEnd) &&
                mCurrentFrame.isFBComposed[i]){
            if(!mCurrentFrame.drop[i]){
                //If an unsupported layer is being attempted to
                //be pulled out we should fail
                if(not isSupportedForMDPComp(ctx, layer)) {
                    return false;
                }
                mCurrentFrame.isFBComposed[i] = false;
            }
        }
    }

    // update the frame data
    mCurrentFrame.fbZ = fbZ;
    mCurrentFrame.fbCount = maxBatchCount;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount -
            mCurrentFrame.fbCount - mCurrentFrame.dropCount;

    ALOGD_IF(isDebug(),"%s: cached count: %d",__FUNCTION__,
            mCurrentFrame.fbCount);

    return true;
}
Saurabh Shah85234ec2013-04-12 17:09:00 -07001167
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001168void MDPComp::updateLayerCache(hwc_context_t* ctx,
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001169 hwc_display_contents_1_t* list) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001170 int numAppLayers = ctx->listStats[mDpy].numAppLayers;
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001171 int fbCount = 0;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001172
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001173 for(int i = 0; i < numAppLayers; i++) {
1174 if (mCachedFrame.hnd[i] == list->hwLayers[i].handle) {
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001175 if(!mCurrentFrame.drop[i])
1176 fbCount++;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001177 mCurrentFrame.isFBComposed[i] = true;
1178 } else {
Saurabh Shahaa236822013-04-24 18:07:26 -07001179 mCurrentFrame.isFBComposed[i] = false;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001180 }
1181 }
Saurabh Shahaa236822013-04-24 18:07:26 -07001182
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001183 mCurrentFrame.fbCount = fbCount;
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001184 mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount
1185 - mCurrentFrame.dropCount;
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001186
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001187 ALOGD_IF(isDebug(),"%s: MDP count: %d FB count %d drop count: %d"
1188 ,__FUNCTION__, mCurrentFrame.mdpCount, mCurrentFrame.fbCount,
1189 mCurrentFrame.dropCount);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001190}
1191
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001192void MDPComp::updateYUV(hwc_context_t* ctx, hwc_display_contents_1_t* list,
1193 bool secureOnly) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001194 int nYuvCount = ctx->listStats[mDpy].yuvCount;
1195 for(int index = 0;index < nYuvCount; index++){
1196 int nYuvIndex = ctx->listStats[mDpy].yuvIndices[index];
1197 hwc_layer_1_t* layer = &list->hwLayers[nYuvIndex];
1198
1199 if(!isYUVDoable(ctx, layer)) {
1200 if(!mCurrentFrame.isFBComposed[nYuvIndex]) {
1201 mCurrentFrame.isFBComposed[nYuvIndex] = true;
1202 mCurrentFrame.fbCount++;
1203 }
1204 } else {
1205 if(mCurrentFrame.isFBComposed[nYuvIndex]) {
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001206 private_handle_t *hnd = (private_handle_t *)layer->handle;
1207 if(!secureOnly || isSecureBuffer(hnd)) {
1208 mCurrentFrame.isFBComposed[nYuvIndex] = false;
1209 mCurrentFrame.fbCount--;
1210 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001211 }
1212 }
1213 }
Saurabh Shahaa236822013-04-24 18:07:26 -07001214
1215 mCurrentFrame.mdpCount = mCurrentFrame.layerCount -
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001216 mCurrentFrame.fbCount - mCurrentFrame.dropCount;
1217 ALOGD_IF(isDebug(),"%s: fb count: %d",__FUNCTION__,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001218 mCurrentFrame.fbCount);
1219}
1220
/* Common tail for every composition strategy: validates the proposed frame
 * against resource and hardware-limitation checks, configures the FB
 * target and each MDP pipe (assigning z-orders around the FB slot), then
 * commits the staged overlay configuration. Returns false on any failure;
 * callers are responsible for calling reset(). */
bool MDPComp::postHeuristicsHandling(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {

    //Capability checks
    if(!resourceCheck(ctx, list)) {
        ALOGD_IF(isDebug(), "%s: resource check failed", __FUNCTION__);
        return false;
    }

    //Limitations checks
    if(!hwLimitationsCheck(ctx, list)) {
        ALOGD_IF(isDebug(), "%s: HW limitations",__FUNCTION__);
        return false;
    }

    //Configure framebuffer first if applicable
    if(mCurrentFrame.fbZ >= 0) {
        if(!ctx->mFBUpdate[mDpy]->prepare(ctx, list, mCurrentFrame.fbZ)) {
            ALOGD_IF(isDebug(), "%s configure framebuffer failed",
                    __FUNCTION__);
            return false;
        }
    }

    //Build the layer <-> MDP index maps from the isFBComposed[] marking.
    mCurrentFrame.map();

    if(!allocLayerPipes(ctx, list)) {
        ALOGD_IF(isDebug(), "%s: Unable to allocate MDP pipes", __FUNCTION__);
        return false;
    }

    for (int index = 0, mdpNextZOrder = 0; index < mCurrentFrame.layerCount;
            index++) {
        if(!mCurrentFrame.isFBComposed[index]) {
            int mdpIndex = mCurrentFrame.layerToMDP[index];
            hwc_layer_1_t* layer = &list->hwLayers[index];

            //Leave fbZ for framebuffer. CACHE/GLES layers go here.
            if(mdpNextZOrder == mCurrentFrame.fbZ) {
                mdpNextZOrder++;
            }
            MdpPipeInfo* cur_pipe = mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
            cur_pipe->zOrder = mdpNextZOrder++;

            private_handle_t *hnd = (private_handle_t *)layer->handle;
            //A split 4k2k YUV layer uses two pipes, hence two z-order slots.
            if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
                if(configure4k2kYuv(ctx, layer,
                        mCurrentFrame.mdpToLayer[mdpIndex])
                        != 0 ){
                    ALOGD_IF(isDebug(), "%s: Failed to configure split pipes \
                            for layer %d",__FUNCTION__, index);
                    return false;
                }
                else{
                    mdpNextZOrder++;
                }
                continue;
            }
            if(configure(ctx, layer, mCurrentFrame.mdpToLayer[mdpIndex]) != 0 ){
                ALOGD_IF(isDebug(), "%s: Failed to configure overlay for \
                        layer %d",__FUNCTION__, index);
                return false;
            }
        }
    }

    //Commit the staged overlay state to the driver.
    if(!ctx->mOverlay->validateAndSet(mDpy, ctx->dpyAttr[mDpy].fd)) {
        ALOGD_IF(isDebug(), "%s: Failed to validate and set overlay for dpy %d"
                ,__FUNCTION__, mDpy);
        return false;
    }

    setRedraw(ctx, list);
    return true;
}
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001296
Saurabh Shah173f4242013-11-20 09:50:12 -08001297bool MDPComp::resourceCheck(hwc_context_t *ctx,
1298 hwc_display_contents_1_t *list) {
1299 const bool fbUsed = mCurrentFrame.fbCount;
1300 if(mCurrentFrame.mdpCount > sMaxPipesPerMixer - fbUsed) {
1301 ALOGD_IF(isDebug(), "%s: Exceeds MAX_PIPES_PER_MIXER",__FUNCTION__);
1302 return false;
1303 }
1304
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001305 double size = calcMDPBytesRead(ctx, list);
Saurabh Shah173f4242013-11-20 09:50:12 -08001306 if(!bandwidthCheck(ctx, size)) {
1307 ALOGD_IF(isDebug(), "%s: Exceeds bandwidth",__FUNCTION__);
1308 return false;
1309 }
1310
1311 return true;
1312}
1313
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001314double MDPComp::calcMDPBytesRead(hwc_context_t *ctx,
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001315 hwc_display_contents_1_t* list) {
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001316 double size = 0;
1317 const double GIG = 1000000000.0;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001318
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001319 //Skip for targets where no device tree value for bw is supplied
1320 if(sMaxBw <= 0.0) {
1321 return 0.0;
1322 }
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001323
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001324 for (uint32_t i = 0; i < list->numHwLayers - 1; i++) {
1325 if(!mCurrentFrame.isFBComposed[i]) {
1326 hwc_layer_1_t* layer = &list->hwLayers[i];
1327 private_handle_t *hnd = (private_handle_t *)layer->handle;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001328 if (hnd) {
Saurabh Shah62e1d732013-09-17 10:44:05 -07001329 hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
Saurabh Shah90789162013-09-16 10:29:20 -07001330 hwc_rect_t dst = layer->displayFrame;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001331 float bpp = ((float)hnd->size) / (hnd->width * hnd->height);
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001332 size += (bpp * (crop.right - crop.left) *
1333 (crop.bottom - crop.top) *
1334 ctx->dpyAttr[mDpy].yres / (dst.bottom - dst.top)) /
1335 GIG;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001336 }
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001337 }
1338 }
1339
1340 if(mCurrentFrame.fbCount) {
1341 hwc_layer_1_t* layer = &list->hwLayers[list->numHwLayers - 1];
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001342 int tempw, temph;
1343 size += (getBufferSizeAndDimensions(
1344 layer->displayFrame.right - layer->displayFrame.left,
1345 layer->displayFrame.bottom - layer->displayFrame.top,
1346 HAL_PIXEL_FORMAT_RGBA_8888,
1347 tempw, temph)) / GIG;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001348 }
1349
1350 return size;
1351}
1352
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001353bool MDPComp::bandwidthCheck(hwc_context_t *ctx, const double& size) {
1354 //Skip for targets where no device tree value for bw is supplied
1355 if(sMaxBw <= 0.0) {
1356 return true;
1357 }
1358
1359 double panelRefRate =
1360 1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;
1361 if((size * panelRefRate) > (sMaxBw - sBwClaimed)) {
1362 return false;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001363 }
1364 return true;
1365}
1366
Prabhanjan Kandula21918db2013-11-26 15:51:58 +05301367bool MDPComp::hwLimitationsCheck(hwc_context_t* ctx,
1368 hwc_display_contents_1_t* list) {
1369
1370 //A-family hw limitation:
1371 //If a layer need alpha scaling, MDP can not support.
1372 if(ctx->mMDP.version < qdutils::MDSS_V5) {
1373 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1374 if(!mCurrentFrame.isFBComposed[i] &&
1375 isAlphaScaled( &list->hwLayers[i])) {
1376 ALOGD_IF(isDebug(), "%s:frame needs alphaScaling",__FUNCTION__);
1377 return false;
1378 }
1379 }
1380 }
1381
1382 // On 8x26 & 8974 hw, we have a limitation of downscaling+blending.
1383 //If multiple layers requires downscaling and also they are overlapping
1384 //fall back to GPU since MDSS can not handle it.
1385 if(qdutils::MDPVersion::getInstance().is8x74v2() ||
1386 qdutils::MDPVersion::getInstance().is8x26()) {
1387 for(int i = 0; i < mCurrentFrame.layerCount-1; ++i) {
1388 hwc_layer_1_t* botLayer = &list->hwLayers[i];
1389 if(!mCurrentFrame.isFBComposed[i] &&
1390 isDownscaleRequired(botLayer)) {
1391 //if layer-i is marked for MDP and needs downscaling
1392 //check if any MDP layer on top of i & overlaps with layer-i
1393 for(int j = i+1; j < mCurrentFrame.layerCount; ++j) {
1394 hwc_layer_1_t* topLayer = &list->hwLayers[j];
1395 if(!mCurrentFrame.isFBComposed[j] &&
1396 isDownscaleRequired(topLayer)) {
1397 hwc_rect_t r = getIntersection(botLayer->displayFrame,
1398 topLayer->displayFrame);
1399 if(isValidRect(r))
1400 return false;
1401 }
1402 }
1403 }
1404 }
1405 }
1406 return true;
1407}
1408
/*
 * Prepare-phase entry point for display mDpy.
 * Decides which layers of |list| MDP will compose and which are left to
 * the GPU (framebuffer), and flags the layers accordingly through
 * setMDPCompLayerFlags().
 *
 * Returns 0 when an MDP (full or mixed) composition was configured,
 * -1 when the entire frame falls back to GPU composition.
 */
int MDPComp::prepare(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
    int ret = 0;
    const int numLayers = ctx->listStats[mDpy].numAppLayers;
    MDPVersion& mdpVersion = qdutils::MDPVersion::getInstance();

    //Do not cache the information for next draw cycle.
    if(numLayers > MAX_NUM_APP_LAYERS or (!numLayers)) {
        ALOGI("%s: Unsupported layer count for mdp composition",
                __FUNCTION__);
        mCachedFrame.reset();
        return -1;
    }

    //reset old data
    mCurrentFrame.reset(numLayers);
    memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
    mCurrentFrame.dropCount = 0;

    // Detect the start of animation and fall back to GPU only once to cache
    // all the layers in FB and display FB content untill animation completes.
    if(ctx->listStats[mDpy].isDisplayAnimating) {
        mCurrentFrame.needsRedraw = false;
        if(ctx->mAnimationState[mDpy] == ANIMATION_STOPPED) {
            // First animating frame: redraw once so FB holds all layers
            mCurrentFrame.needsRedraw = true;
            ctx->mAnimationState[mDpy] = ANIMATION_STARTED;
        }
        setMDPCompLayerFlags(ctx, list);
        mCachedFrame.updateCounts(mCurrentFrame);
        ret = -1;
        return ret;
    } else {
        ctx->mAnimationState[mDpy] = ANIMATION_STOPPED;
    }

    //Hard conditions, if not met, cannot do MDP comp
    if(isFrameDoable(ctx)) {
        generateROI(ctx, list);

        //Convert from kbps to gbps
        //Use the lower bandwidth cap when an external display is up or
        //the panel is not command-mode (i.e. continuously refreshing).
        sMaxBw = mdpVersion.getHighBw() / 1000000.0;
        if (ctx->mExtDisplay->isConnected() ||
                ctx->mMDP.panel != MIPI_CMD_PANEL) {
            sMaxBw = mdpVersion.getLowBw() / 1000000.0;
        }

        //Try full-MDP first, then video-only MDP; else GPU fallback.
        if(tryFullFrame(ctx, list) || tryVideoOnly(ctx, list)) {
            setMDPCompLayerFlags(ctx, list);
        } else {
            reset(ctx);
            memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
            mCurrentFrame.dropCount = 0;
            ret = -1;
        }
    } else {
        ALOGD_IF( isDebug(),"%s: MDP Comp not possible for this frame",
                __FUNCTION__);
        ret = -1;
    }

    if(isDebug()) {
        ALOGD("GEOMETRY change: %d",
                (list->flags & HWC_GEOMETRY_CHANGED));
        android::String8 sDump("");
        dump(sDump);
        ALOGD("%s",sDump.string());
    }

    //Remember this frame's composition for change detection next cycle.
    mCachedFrame.cacheAll(list);
    mCachedFrame.updateCounts(mCurrentFrame);
    //Claim the bandwidth this display's frame will consume (bytes read
    //per second = bytes per frame * refresh rate).
    //NOTE(review): sBwClaimed/sMaxBw appear to be statics shared across
    //display instances — confirm against the class declaration.
    double panelRefRate =
                1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;
    sBwClaimed += calcMDPBytesRead(ctx, list) * panelRefRate;
    return ret;
}
1483
Arun Kumar K.R2aa44c62014-01-21 23:08:28 -08001484bool MDPComp::allocSplitVGPipesfor4k2k(hwc_context_t *ctx, int index) {
radhakrishnac9a67412013-09-25 17:40:42 +05301485
1486 bool bRet = true;
radhakrishnac9a67412013-09-25 17:40:42 +05301487 int mdpIndex = mCurrentFrame.layerToMDP[index];
1488 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
1489 info.pipeInfo = new MdpYUVPipeInfo;
1490 info.rot = NULL;
1491 MdpYUVPipeInfo& pipe_info = *(MdpYUVPipeInfo*)info.pipeInfo;
1492 ePipeType type = MDPCOMP_OV_VG;
1493
1494 pipe_info.lIndex = ovutils::OV_INVALID;
1495 pipe_info.rIndex = ovutils::OV_INVALID;
1496
1497 pipe_info.lIndex = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
1498 if(pipe_info.lIndex == ovutils::OV_INVALID){
1499 bRet = false;
1500 ALOGD_IF(isDebug(),"%s: allocating first VG pipe failed",
1501 __FUNCTION__);
1502 }
1503 pipe_info.rIndex = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
1504 if(pipe_info.rIndex == ovutils::OV_INVALID){
1505 bRet = false;
1506 ALOGD_IF(isDebug(),"%s: allocating second VG pipe failed",
1507 __FUNCTION__);
1508 }
1509 return bRet;
1510}
Arun Kumar K.R2aa44c62014-01-21 23:08:28 -08001511//=============MDPCompNonSplit==================================================
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001512
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001513void MDPCompNonSplit::adjustForSourceSplit(hwc_context_t *ctx,
Arun Kumar K.R2aa44c62014-01-21 23:08:28 -08001514 hwc_display_contents_1_t*) {
radhakrishnac9a67412013-09-25 17:40:42 +05301515 //As we split 4kx2k yuv layer and program to 2 VG pipes
1516 //(if available) increase mdpcount accordingly
1517 mCurrentFrame.mdpCount += ctx->listStats[mDpy].yuv4k2kCount;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001518
1519 //If 4k2k Yuv layer split is possible, and if
1520 //fbz is above 4k2k layer, increment fb zorder by 1
1521 //as we split 4k2k layer and increment zorder for right half
1522 //of the layer
1523 if(mCurrentFrame.fbZ >= 0) {
1524 int n4k2kYuvCount = ctx->listStats[mDpy].yuv4k2kCount;
1525 for(int index = 0; index < n4k2kYuvCount; index++){
1526 int n4k2kYuvIndex =
1527 ctx->listStats[mDpy].yuv4k2kIndices[index];
1528 if(mCurrentFrame.fbZ > n4k2kYuvIndex){
1529 mCurrentFrame.fbZ += 1;
1530 }
1531 }
1532 }
radhakrishnac9a67412013-09-25 17:40:42 +05301533}
1534
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001535/*
1536 * Configures pipe(s) for MDP composition
1537 */
Saurabh Shah88e4d272013-09-03 13:31:29 -07001538int MDPCompNonSplit::configure(hwc_context_t *ctx, hwc_layer_1_t *layer,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001539 PipeLayerPair& PipeLayerPair) {
Saurabh Shah88e4d272013-09-03 13:31:29 -07001540 MdpPipeInfoNonSplit& mdp_info =
1541 *(static_cast<MdpPipeInfoNonSplit*>(PipeLayerPair.pipeInfo));
Saurabh Shahacf10202013-02-26 10:15:15 -08001542 eMdpFlags mdpFlags = OV_MDP_BACKEND_COMPOSITION;
1543 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1544 eIsFg isFg = IS_FG_OFF;
1545 eDest dest = mdp_info.index;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001546
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001547 ALOGD_IF(isDebug(),"%s: configuring: layer: %p z_order: %d dest_pipe: %d",
1548 __FUNCTION__, layer, zOrder, dest);
1549
Saurabh Shah88e4d272013-09-03 13:31:29 -07001550 return configureNonSplit(ctx, layer, mDpy, mdpFlags, zOrder, isFg, dest,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001551 &PipeLayerPair.rot);
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001552}
1553
Saurabh Shah88e4d272013-09-03 13:31:29 -07001554bool MDPCompNonSplit::allocLayerPipes(hwc_context_t *ctx,
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001555 hwc_display_contents_1_t* list) {
1556 for(int index = 0; index < mCurrentFrame.layerCount; index++) {
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001557
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001558 if(mCurrentFrame.isFBComposed[index]) continue;
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001559
Jeykumar Sankarancf537002013-01-21 21:19:15 -08001560 hwc_layer_1_t* layer = &list->hwLayers[index];
1561 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301562 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
Arun Kumar K.R2aa44c62014-01-21 23:08:28 -08001563 if(allocSplitVGPipesfor4k2k(ctx, index)){
radhakrishnac9a67412013-09-25 17:40:42 +05301564 continue;
1565 }
1566 }
1567
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001568 int mdpIndex = mCurrentFrame.layerToMDP[index];
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001569 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
Saurabh Shah88e4d272013-09-03 13:31:29 -07001570 info.pipeInfo = new MdpPipeInfoNonSplit;
Saurabh Shahacf10202013-02-26 10:15:15 -08001571 info.rot = NULL;
Saurabh Shah88e4d272013-09-03 13:31:29 -07001572 MdpPipeInfoNonSplit& pipe_info = *(MdpPipeInfoNonSplit*)info.pipeInfo;
Jeykumar Sankarana37fdbf2013-03-06 18:59:28 -08001573 ePipeType type = MDPCOMP_OV_ANY;
1574
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001575 if(isYuvBuffer(hnd)) {
1576 type = MDPCOMP_OV_VG;
Prabhanjan Kandula47191dc2014-01-22 23:01:45 +05301577 } else if(qdutils::MDPVersion::getInstance().is8x26() &&
1578 (ctx->dpyAttr[HWC_DISPLAY_PRIMARY].xres > 1024)) {
1579 if(qhwc::needsScaling(layer))
1580 type = MDPCOMP_OV_RGB;
Prabhanjan Kandula21918db2013-11-26 15:51:58 +05301581 } else if(!qhwc::needsScaling(layer)
Saurabh Shah85234ec2013-04-12 17:09:00 -07001582 && Overlay::getDMAMode() != Overlay::DMA_BLOCK_MODE
1583 && ctx->mMDP.version >= qdutils::MDSS_V5) {
Jeykumar Sankarana37fdbf2013-03-06 18:59:28 -08001584 type = MDPCOMP_OV_DMA;
1585 }
1586
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001587 pipe_info.index = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001588 if(pipe_info.index == ovutils::OV_INVALID) {
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001589 ALOGD_IF(isDebug(), "%s: Unable to get pipe type = %d",
1590 __FUNCTION__, (int) type);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001591 return false;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001592 }
1593 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001594 return true;
1595}
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001596
radhakrishnac9a67412013-09-25 17:40:42 +05301597int MDPCompNonSplit::configure4k2kYuv(hwc_context_t *ctx, hwc_layer_1_t *layer,
1598 PipeLayerPair& PipeLayerPair) {
1599 MdpYUVPipeInfo& mdp_info =
1600 *(static_cast<MdpYUVPipeInfo*>(PipeLayerPair.pipeInfo));
1601 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1602 eIsFg isFg = IS_FG_OFF;
1603 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1604 eDest lDest = mdp_info.lIndex;
1605 eDest rDest = mdp_info.rIndex;
1606
1607 return configureSourceSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg,
1608 lDest, rDest, &PipeLayerPair.rot);
1609}
1610
/*
 * Draw phase for a non-split display: queues each MDP-marked layer's
 * buffer to the pipe(s) configured during prepare. Routes through the
 * rotator first when one is attached. Returns true on success or when
 * there is nothing for MDP to draw; false if any queue fails.
 */
bool MDPCompNonSplit::draw(hwc_context_t *ctx, hwc_display_contents_1_t* list) {

    // Nothing was marked for MDP composition this frame
    if(!isEnabled()) {
        ALOGD_IF(isDebug(),"%s: MDP Comp not configured", __FUNCTION__);
        return true;
    }

    if(!ctx || !list) {
        ALOGE("%s: invalid contxt or list",__FUNCTION__);
        return false;
    }

    // Defensive: prepare() already rejects such frames
    if(ctx->listStats[mDpy].numAppLayers > MAX_NUM_APP_LAYERS) {
        ALOGD_IF(isDebug(),"%s: Exceeding max layer count", __FUNCTION__);
        return true;
    }

    // Set the Handle timeout to true for MDP or MIXED composition.
    if(idleInvalidator && !sIdleFallBack && mCurrentFrame.mdpCount) {
        sHandleTimeout = true;
    }

    overlay::Overlay& ov = *ctx->mOverlay;
    LayerProp *layerProp = ctx->layerProp[mDpy];

    int numHwLayers = ctx->listStats[mDpy].numAppLayers;
    // Loop degenerates to a no-op when no layer uses MDP (mdpCount == 0)
    for(int i = 0; i < numHwLayers && mCurrentFrame.mdpCount; i++ )
    {
        if(mCurrentFrame.isFBComposed[i]) continue;

        hwc_layer_1_t *layer = &list->hwLayers[i];
        private_handle_t *hnd = (private_handle_t *)layer->handle;
        if(!hnd) {
            // A null handle is only legal for solid color-fill layers
            if (!(layer->flags & HWC_COLOR_FILL)) {
                ALOGE("%s handle null", __FUNCTION__);
                return false;
            }
            // No PLAY for Color layer
            layerProp[i].mFlags &= ~HWC_MDPCOMP;
            continue;
        }

        int mdpIndex = mCurrentFrame.layerToMDP[i];

        // Split 4k2k YUV layer: the same buffer (possibly rotated) is
        // queued to both the left-half and right-half VG pipes.
        if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit)
        {
            MdpYUVPipeInfo& pipe_info =
                *(MdpYUVPipeInfo*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
            Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
            ovutils::eDest indexL = pipe_info.lIndex;
            ovutils::eDest indexR = pipe_info.rIndex;
            int fd = hnd->fd;
            uint32_t offset = hnd->offset;
            // Rotate first; the rotator's output buffer is what is played
            if(rot) {
                rot->queueBuffer(fd, offset);
                fd = rot->getDstMemId();
                offset = rot->getDstOffset();
            }
            if(indexL != ovutils::OV_INVALID) {
                ovutils::eDest destL = (ovutils::eDest)indexL;
                ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
                        using pipe: %d", __FUNCTION__, layer, hnd, indexL );
                if (!ov.queueBuffer(fd, offset, destL)) {
                    ALOGE("%s: queueBuffer failed for display:%d",
                            __FUNCTION__, mDpy);
                    return false;
                }
            }

            if(indexR != ovutils::OV_INVALID) {
                ovutils::eDest destR = (ovutils::eDest)indexR;
                ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
                        using pipe: %d", __FUNCTION__, layer, hnd, indexR );
                if (!ov.queueBuffer(fd, offset, destR)) {
                    ALOGE("%s: queueBuffer failed for display:%d",
                            __FUNCTION__, mDpy);
                    return false;
                }
            }
        }
        // Regular layer: single pipe on the default mixer
        else{
            MdpPipeInfoNonSplit& pipe_info =
            *(MdpPipeInfoNonSplit*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
            ovutils::eDest dest = pipe_info.index;
            if(dest == ovutils::OV_INVALID) {
                ALOGE("%s: Invalid pipe index (%d)", __FUNCTION__, dest);
                return false;
            }

            // Skip layers whose MDP flag was cleared after prepare
            if(!(layerProp[i].mFlags & HWC_MDPCOMP)) {
                continue;
            }

            ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
                    using pipe: %d", __FUNCTION__, layer,
                    hnd, dest );

            int fd = hnd->fd;
            uint32_t offset = hnd->offset;

            Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
            // Rotate first; play the rotator's output buffer instead
            if(rot) {
                if(!rot->queueBuffer(fd, offset))
                    return false;
                fd = rot->getDstMemId();
                offset = rot->getDstOffset();
            }

            if (!ov.queueBuffer(fd, offset, dest)) {
                ALOGE("%s: queueBuffer failed for display:%d ",
                        __FUNCTION__, mDpy);
                return false;
            }
        }

        // Flag consumed: this layer has been handed to MDP
        layerProp[i].mFlags &= ~HWC_MDPCOMP;
    }
    return true;
}
1730
Saurabh Shah88e4d272013-09-03 13:31:29 -07001731//=============MDPCompSplit===================================================
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001732
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001733void MDPCompSplit::adjustForSourceSplit(hwc_context_t *ctx,
radhakrishnac9a67412013-09-25 17:40:42 +05301734 hwc_display_contents_1_t* list){
1735 //if 4kx2k yuv layer is totally present in either in left half
1736 //or right half then try splitting the yuv layer to avoid decimation
1737 int n4k2kYuvCount = ctx->listStats[mDpy].yuv4k2kCount;
1738 const int lSplit = getLeftSplit(ctx, mDpy);
1739 for(int index = 0; index < n4k2kYuvCount; index++){
1740 int n4k2kYuvIndex = ctx->listStats[mDpy].yuv4k2kIndices[index];
1741 hwc_layer_1_t* layer = &list->hwLayers[n4k2kYuvIndex];
1742 hwc_rect_t dst = layer->displayFrame;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001743 if((dst.left > lSplit) || (dst.right < lSplit)) {
radhakrishnac9a67412013-09-25 17:40:42 +05301744 mCurrentFrame.mdpCount += 1;
1745 }
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001746 if(mCurrentFrame.fbZ > n4k2kYuvIndex){
1747 mCurrentFrame.fbZ += 1;
1748 }
radhakrishnac9a67412013-09-25 17:40:42 +05301749 }
1750}
1751
Saurabh Shah88e4d272013-09-03 13:31:29 -07001752bool MDPCompSplit::acquireMDPPipes(hwc_context_t *ctx, hwc_layer_1_t* layer,
1753 MdpPipeInfoSplit& pipe_info,
Saurabh Shah67a38c32013-06-10 16:23:15 -07001754 ePipeType type) {
Saurabh Shah07a8ca82013-08-06 18:45:42 -07001755 const int lSplit = getLeftSplit(ctx, mDpy);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001756
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001757 hwc_rect_t dst = layer->displayFrame;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001758 pipe_info.lIndex = ovutils::OV_INVALID;
1759 pipe_info.rIndex = ovutils::OV_INVALID;
1760
1761 if (dst.left < lSplit) {
1762 pipe_info.lIndex = getMdpPipe(ctx, type, Overlay::MIXER_LEFT);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001763 if(pipe_info.lIndex == ovutils::OV_INVALID)
1764 return false;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001765 }
1766
1767 if(dst.right > lSplit) {
1768 pipe_info.rIndex = getMdpPipe(ctx, type, Overlay::MIXER_RIGHT);
1769 if(pipe_info.rIndex == ovutils::OV_INVALID)
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001770 return false;
1771 }
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001772
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001773 return true;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001774}
1775
Saurabh Shah88e4d272013-09-03 13:31:29 -07001776bool MDPCompSplit::allocLayerPipes(hwc_context_t *ctx,
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001777 hwc_display_contents_1_t* list) {
1778 for(int index = 0 ; index < mCurrentFrame.layerCount; index++) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001779
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001780 if(mCurrentFrame.isFBComposed[index]) continue;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001781
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001782 hwc_layer_1_t* layer = &list->hwLayers[index];
1783 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301784 hwc_rect_t dst = layer->displayFrame;
1785 const int lSplit = getLeftSplit(ctx, mDpy);
1786 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1787 if((dst.left > lSplit)||(dst.right < lSplit)){
Arun Kumar K.R2aa44c62014-01-21 23:08:28 -08001788 if(allocSplitVGPipesfor4k2k(ctx, index)){
radhakrishnac9a67412013-09-25 17:40:42 +05301789 continue;
1790 }
1791 }
1792 }
Saurabh Shah0d65dbe2013-06-06 18:33:16 -07001793 int mdpIndex = mCurrentFrame.layerToMDP[index];
1794 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
Saurabh Shah88e4d272013-09-03 13:31:29 -07001795 info.pipeInfo = new MdpPipeInfoSplit;
Saurabh Shah9e3adb22013-03-26 11:16:27 -07001796 info.rot = NULL;
Saurabh Shah88e4d272013-09-03 13:31:29 -07001797 MdpPipeInfoSplit& pipe_info = *(MdpPipeInfoSplit*)info.pipeInfo;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001798 ePipeType type = MDPCOMP_OV_ANY;
1799
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001800 if(isYuvBuffer(hnd)) {
1801 type = MDPCOMP_OV_VG;
Sushil Chauhan15a2ea62013-09-04 18:28:36 -07001802 } else if(!qhwc::needsScalingWithSplit(ctx, layer, mDpy)
Saurabh Shah85234ec2013-04-12 17:09:00 -07001803 && Overlay::getDMAMode() != Overlay::DMA_BLOCK_MODE
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001804 && ctx->mMDP.version >= qdutils::MDSS_V5) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001805 type = MDPCOMP_OV_DMA;
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001806 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001807
1808 if(!acquireMDPPipes(ctx, layer, pipe_info, type)) {
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001809 ALOGD_IF(isDebug(), "%s: Unable to get pipe for type = %d",
1810 __FUNCTION__, (int) type);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001811 return false;
1812 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001813 }
1814 return true;
1815}
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001816
radhakrishnac9a67412013-09-25 17:40:42 +05301817int MDPCompSplit::configure4k2kYuv(hwc_context_t *ctx, hwc_layer_1_t *layer,
1818 PipeLayerPair& PipeLayerPair) {
1819 const int lSplit = getLeftSplit(ctx, mDpy);
1820 hwc_rect_t dst = layer->displayFrame;
1821 if((dst.left > lSplit)||(dst.right < lSplit)){
1822 MdpYUVPipeInfo& mdp_info =
1823 *(static_cast<MdpYUVPipeInfo*>(PipeLayerPair.pipeInfo));
1824 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1825 eIsFg isFg = IS_FG_OFF;
1826 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1827 eDest lDest = mdp_info.lIndex;
1828 eDest rDest = mdp_info.rIndex;
1829
1830 return configureSourceSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg,
1831 lDest, rDest, &PipeLayerPair.rot);
1832 }
1833 else{
1834 return configure(ctx, layer, PipeLayerPair);
1835 }
1836}
1837
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001838/*
1839 * Configures pipe(s) for MDP composition
1840 */
Saurabh Shah88e4d272013-09-03 13:31:29 -07001841int MDPCompSplit::configure(hwc_context_t *ctx, hwc_layer_1_t *layer,
Saurabh Shah67a38c32013-06-10 16:23:15 -07001842 PipeLayerPair& PipeLayerPair) {
Saurabh Shah88e4d272013-09-03 13:31:29 -07001843 MdpPipeInfoSplit& mdp_info =
1844 *(static_cast<MdpPipeInfoSplit*>(PipeLayerPair.pipeInfo));
Saurabh Shahacf10202013-02-26 10:15:15 -08001845 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1846 eIsFg isFg = IS_FG_OFF;
1847 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1848 eDest lDest = mdp_info.lIndex;
1849 eDest rDest = mdp_info.rIndex;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001850
1851 ALOGD_IF(isDebug(),"%s: configuring: layer: %p z_order: %d dest_pipeL: %d"
1852 "dest_pipeR: %d",__FUNCTION__, layer, zOrder, lDest, rDest);
1853
Saurabh Shah88e4d272013-09-03 13:31:29 -07001854 return configureSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg, lDest,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001855 rDest, &PipeLayerPair.rot);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001856}
1857
/*
 * Draw phase for a split display: queues each MDP-marked layer's buffer
 * to the left and/or right mixer pipes configured during prepare,
 * routing through the assistive-display (AD) block and/or rotator when
 * active. Returns true on success or when there is nothing for MDP to
 * draw; false if any queue fails.
 */
bool MDPCompSplit::draw(hwc_context_t *ctx, hwc_display_contents_1_t* list) {

    // Nothing was marked for MDP composition this frame
    if(!isEnabled()) {
        ALOGD_IF(isDebug(),"%s: MDP Comp not configured", __FUNCTION__);
        return true;
    }

    if(!ctx || !list) {
        ALOGE("%s: invalid contxt or list",__FUNCTION__);
        return false;
    }

    // Defensive: prepare() already rejects such frames
    if(ctx->listStats[mDpy].numAppLayers > MAX_NUM_APP_LAYERS) {
        ALOGD_IF(isDebug(),"%s: Exceeding max layer count", __FUNCTION__);
        return true;
    }

    // Set the Handle timeout to true for MDP or MIXED composition.
    if(idleInvalidator && !sIdleFallBack && mCurrentFrame.mdpCount) {
        sHandleTimeout = true;
    }

    overlay::Overlay& ov = *ctx->mOverlay;
    LayerProp *layerProp = ctx->layerProp[mDpy];

    int numHwLayers = ctx->listStats[mDpy].numAppLayers;
    // Loop degenerates to a no-op when no layer uses MDP (mdpCount == 0)
    for(int i = 0; i < numHwLayers && mCurrentFrame.mdpCount; i++ )
    {
        if(mCurrentFrame.isFBComposed[i]) continue;

        hwc_layer_1_t *layer = &list->hwLayers[i];
        private_handle_t *hnd = (private_handle_t *)layer->handle;
        if(!hnd) {
            ALOGE("%s handle null", __FUNCTION__);
            return false;
        }

        // Skip layers whose MDP flag was cleared after prepare
        if(!(layerProp[i].mFlags & HWC_MDPCOMP)) {
            continue;
        }

        int mdpIndex = mCurrentFrame.layerToMDP[i];

        // Source-split 4k2k YUV layer: the same buffer (possibly
        // rotated) is queued to both of its VG pipes.
        if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit)
        {
            MdpYUVPipeInfo& pipe_info =
                *(MdpYUVPipeInfo*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
            Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
            ovutils::eDest indexL = pipe_info.lIndex;
            ovutils::eDest indexR = pipe_info.rIndex;
            int fd = hnd->fd;
            uint32_t offset = hnd->offset;
            // Rotate first; the rotator's output buffer is what is played
            if(rot) {
                rot->queueBuffer(fd, offset);
                fd = rot->getDstMemId();
                offset = rot->getDstOffset();
            }
            if(indexL != ovutils::OV_INVALID) {
                ovutils::eDest destL = (ovutils::eDest)indexL;
                ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
                        using pipe: %d", __FUNCTION__, layer, hnd, indexL );
                if (!ov.queueBuffer(fd, offset, destL)) {
                    ALOGE("%s: queueBuffer failed for display:%d",
                            __FUNCTION__, mDpy);
                    return false;
                }
            }

            if(indexR != ovutils::OV_INVALID) {
                ovutils::eDest destR = (ovutils::eDest)indexR;
                ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
                        using pipe: %d", __FUNCTION__, layer, hnd, indexR );
                if (!ov.queueBuffer(fd, offset, destR)) {
                    ALOGE("%s: queueBuffer failed for display:%d",
                            __FUNCTION__, mDpy);
                    return false;
                }
            }
        }
        // Regular layer: queue to whichever of the left/right mixer
        // pipes were acquired for it.
        else{
            MdpPipeInfoSplit& pipe_info =
                *(MdpPipeInfoSplit*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
            Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;

            ovutils::eDest indexL = pipe_info.lIndex;
            ovutils::eDest indexR = pipe_info.rIndex;

            int fd = hnd->fd;
            int offset = hnd->offset;

            // When the AD block is on, substitute its output buffer
            if(ctx->mAD->isModeOn()) {
                if(ctx->mAD->draw(ctx, fd, offset)) {
                    fd = ctx->mAD->getDstFd();
                    offset = ctx->mAD->getDstOffset();
                }
            }

            // Rotate first; play the rotator's output buffer instead
            if(rot) {
                rot->queueBuffer(fd, offset);
                fd = rot->getDstMemId();
                offset = rot->getDstOffset();
            }

            //************* play left mixer **********
            if(indexL != ovutils::OV_INVALID) {
                ovutils::eDest destL = (ovutils::eDest)indexL;
                ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
                        using pipe: %d", __FUNCTION__, layer, hnd, indexL );
                if (!ov.queueBuffer(fd, offset, destL)) {
                    ALOGE("%s: queueBuffer failed for left mixer",
                            __FUNCTION__);
                    return false;
                }
            }

            //************* play right mixer **********
            if(indexR != ovutils::OV_INVALID) {
                ovutils::eDest destR = (ovutils::eDest)indexR;
                ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
                        using pipe: %d", __FUNCTION__, layer, hnd, indexR );
                if (!ov.queueBuffer(fd, offset, destR)) {
                    ALOGE("%s: queueBuffer failed for right mixer",
                            __FUNCTION__);
                    return false;
                }
            }
        }

        // Flag consumed: this layer has been handed to MDP
        layerProp[i].mFlags &= ~HWC_MDPCOMP;
    }

    return true;
}
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001991}; //namespace
1992