/*
 * Copyright (C) 2012-2013, The Linux Foundation. All rights reserved.
 * Not a Contribution, Apache license notifications and license are retained
 * for attribution purposes only.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <math.h>
#include "hwc_mdpcomp.h"
#include <sys/ioctl.h>
#include "external.h"
#include "virtual.h"
#include "qdMetaData.h"
#include "mdp_version.h"
#include "hwc_fbupdate.h"
#include "hwc_ad.h"
#include <overlayRotator.h>

using namespace overlay;
using namespace qdutils;
using namespace overlay::utils;
namespace ovutils = overlay::utils;

namespace qhwc {

//==============MDPComp========================================================

IdleInvalidator *MDPComp::idleInvalidator = NULL;
bool MDPComp::sIdleFallBack = false;
bool MDPComp::sDebugLogs = false;
bool MDPComp::sEnabled = false;
bool MDPComp::sEnableMixedMode = true;
bool MDPComp::sEnablePartialFrameUpdate = false;
int MDPComp::sMaxPipesPerMixer = MAX_PIPES_PER_MIXER;
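// sMaxBw is the total bandwidth budget and sBwClaimed the share of it already
// claimed; both are populated elsewhere in the HAL (units assumed to be GBps)
// and are consumed by loadBasedCompPreferMDP() below to size its GPU batch.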
double MDPComp::sMaxBw = 0.0;
double MDPComp::sBwClaimed = 0.0;
bool MDPComp::sEnable4k2kYUVSplit = false;

MDPComp* MDPComp::getObject(hwc_context_t *ctx, const int& dpy) {
    if(isDisplaySplit(ctx, dpy)) {
        return new MDPCompSplit(dpy);
    }
    return new MDPCompNonSplit(dpy);
}

MDPComp::MDPComp(int dpy):mDpy(dpy){};

void MDPComp::dump(android::String8& buf)
{
    if(mCurrentFrame.layerCount > MAX_NUM_APP_LAYERS)
        return;

    dumpsys_log(buf,"HWC Map for Dpy: %s \n",
                (mDpy == 0) ? "\"PRIMARY\"" :
                (mDpy == 1) ? "\"EXTERNAL\"" : "\"VIRTUAL\"");
    dumpsys_log(buf,"CURR_FRAME: layerCount:%2d mdpCount:%2d "
                "fbCount:%2d \n", mCurrentFrame.layerCount,
                mCurrentFrame.mdpCount, mCurrentFrame.fbCount);
    dumpsys_log(buf,"needsFBRedraw:%3s pipesUsed:%2d MaxPipesPerMixer: %d \n",
                (mCurrentFrame.needsRedraw? "YES" : "NO"),
                mCurrentFrame.mdpCount, sMaxPipesPerMixer);
    dumpsys_log(buf," --------------------------------------------- \n");
    dumpsys_log(buf," listIdx | cached? | mdpIndex | comptype | Z \n");
    dumpsys_log(buf," --------------------------------------------- \n");
    for(int index = 0; index < mCurrentFrame.layerCount; index++ )
        dumpsys_log(buf," %7d | %7s | %8d | %9s | %2d \n",
                    index,
                    (mCurrentFrame.isFBComposed[index] ? "YES" : "NO"),
                    mCurrentFrame.layerToMDP[index],
                    (mCurrentFrame.isFBComposed[index] ?
                     (mCurrentFrame.drop[index] ? "DROP" :
                     (mCurrentFrame.needsRedraw ? "GLES" : "CACHE")) : "MDP"),
                    (mCurrentFrame.isFBComposed[index] ? mCurrentFrame.fbZ :
                     mCurrentFrame.mdpToLayer[mCurrentFrame.layerToMDP[index]].pipeInfo->zOrder));
    dumpsys_log(buf,"\n");
}

bool MDPComp::init(hwc_context_t *ctx) {

    if(!ctx) {
        ALOGE("%s: Invalid hwc context!!",__FUNCTION__);
        return false;
    }

    char property[PROPERTY_VALUE_MAX];

    sEnabled = false;
    if((property_get("persist.hwc.mdpcomp.enable", property, NULL) > 0) &&
       (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
        (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
        sEnabled = true;
    }

    sEnableMixedMode = true;
    if((property_get("debug.mdpcomp.mixedmode.disable", property, NULL) > 0) &&
       (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
        (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
        sEnableMixedMode = false;
    }

    if(property_get("debug.mdpcomp.logs", property, NULL) > 0) {
        if(atoi(property) != 0)
            sDebugLogs = true;
    }

    if(property_get("persist.hwc.partialupdate.enable", property, NULL) > 0) {
        if((atoi(property) != 0) && ctx->mMDP.panel == MIPI_CMD_PANEL &&
           qdutils::MDPVersion::getInstance().is8x74v2())
            sEnablePartialFrameUpdate = true;
    }
    ALOGE_IF(isDebug(), "%s: Partial Update applicable?: %d",__FUNCTION__,
             sEnablePartialFrameUpdate);

    sMaxPipesPerMixer = MAX_PIPES_PER_MIXER;
    if(property_get("debug.mdpcomp.maxpermixer", property, "-1") > 0) {
        int val = atoi(property);
        if(val >= 0)
            sMaxPipesPerMixer = min(val, MAX_PIPES_PER_MIXER);
    }

    if(ctx->mMDP.panel != MIPI_CMD_PANEL) {
        // Idle invalidation is not necessary on command mode panels
        long idle_timeout = DEFAULT_IDLE_TIME;
        if(property_get("debug.mdpcomp.idletime", property, NULL) > 0) {
            if(atoi(property) != 0)
                idle_timeout = atoi(property);
        }

        //create Idle Invalidator only when not disabled through property
        if(idle_timeout != -1)
            idleInvalidator = IdleInvalidator::getInstance();

        if(idleInvalidator == NULL) {
            ALOGE("%s: failed to instantiate idleInvalidator object",
                  __FUNCTION__);
        } else {
            idleInvalidator->init(timeout_handler, ctx, idle_timeout);
        }
    }

    if((property_get("debug.mdpcomp.4k2kSplit", property, "0") > 0) &&
       (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
        (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
        sEnable4k2kYUVSplit = true;
    }
    return true;
}

void MDPComp::reset(hwc_context_t *ctx) {
    const int numLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numLayers);
    ctx->mOverlay->clear(mDpy);
    ctx->mLayerRotMap[mDpy]->clear();
}

void MDPComp::timeout_handler(void *udata) {
    struct hwc_context_t* ctx = (struct hwc_context_t*)(udata);

    if(!ctx) {
        ALOGE("%s: received empty data in timer callback", __FUNCTION__);
        return;
    }

    if(!ctx->proc) {
        ALOGE("%s: HWC proc not registered", __FUNCTION__);
        return;
    }
    sIdleFallBack = true;
    /* Trigger SF to redraw the current frame */
    ctx->proc->invalidate(ctx->proc);
}

void MDPComp::setMDPCompLayerFlags(hwc_context_t *ctx,
                                   hwc_display_contents_1_t* list) {
    LayerProp *layerProp = ctx->layerProp[mDpy];

    for(int index = 0; index < ctx->listStats[mDpy].numAppLayers; index++) {
        hwc_layer_1_t* layer = &(list->hwLayers[index]);
        if(!mCurrentFrame.isFBComposed[index]) {
            layerProp[index].mFlags |= HWC_MDPCOMP;
            layer->compositionType = HWC_OVERLAY;
            layer->hints |= HWC_HINT_CLEAR_FB;
        } else {
            /* Drop the layer when its already present in FB OR when it lies
             * outside frame's ROI */
            if(!mCurrentFrame.needsRedraw || mCurrentFrame.drop[index]) {
                layer->compositionType = HWC_OVERLAY;
            }
        }
    }
}

void MDPComp::setRedraw(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    mCurrentFrame.needsRedraw = false;
    if(!mCachedFrame.isSameFrame(mCurrentFrame, list) ||
       (list->flags & HWC_GEOMETRY_CHANGED) ||
       isSkipPresent(ctx, mDpy)) {
        mCurrentFrame.needsRedraw = true;
    }
}

MDPComp::FrameInfo::FrameInfo() {
    reset(0);
}

void MDPComp::FrameInfo::reset(const int& numLayers) {
    for(int i = 0 ; i < MAX_PIPES_PER_MIXER && numLayers; i++ ) {
        if(mdpToLayer[i].pipeInfo) {
            delete mdpToLayer[i].pipeInfo;
            mdpToLayer[i].pipeInfo = NULL;
            //We dont own the rotator
            mdpToLayer[i].rot = NULL;
        }
    }

    memset(&mdpToLayer, 0, sizeof(mdpToLayer));
    memset(&layerToMDP, -1, sizeof(layerToMDP));
    memset(&isFBComposed, 1, sizeof(isFBComposed));

    layerCount = numLayers;
    fbCount = numLayers;
    mdpCount = 0;
    needsRedraw = true;
    fbZ = -1;
}

void MDPComp::FrameInfo::map() {
    // populate layer and MDP maps
    int mdpIdx = 0;
    for(int idx = 0; idx < layerCount; idx++) {
        if(!isFBComposed[idx]) {
            mdpToLayer[mdpIdx].listIndex = idx;
            layerToMDP[idx] = mdpIdx++;
        }
    }
}

MDPComp::LayerCache::LayerCache() {
    reset();
}

void MDPComp::LayerCache::reset() {
    memset(&hnd, 0, sizeof(hnd));
    memset(&isFBComposed, true, sizeof(isFBComposed));
    memset(&drop, false, sizeof(drop));
    layerCount = 0;
}

void MDPComp::LayerCache::cacheAll(hwc_display_contents_1_t* list) {
    const int numAppLayers = list->numHwLayers - 1;
    for(int i = 0; i < numAppLayers; i++) {
        hnd[i] = list->hwLayers[i].handle;
    }
}

void MDPComp::LayerCache::updateCounts(const FrameInfo& curFrame) {
    layerCount = curFrame.layerCount;
    memcpy(&isFBComposed, &curFrame.isFBComposed, sizeof(isFBComposed));
    memcpy(&drop, &curFrame.drop, sizeof(drop));
}

bool MDPComp::LayerCache::isSameFrame(const FrameInfo& curFrame,
                                      hwc_display_contents_1_t* list) {
    if(layerCount != curFrame.layerCount)
        return false;
    for(int i = 0; i < curFrame.layerCount; i++) {
        if((curFrame.isFBComposed[i] != isFBComposed[i]) ||
           (curFrame.drop[i] != drop[i])) {
            return false;
        }
        if(curFrame.isFBComposed[i] &&
           (hnd[i] != list->hwLayers[i].handle)){
            return false;
        }
    }
    return true;
}

bool MDPComp::isSupportedForMDPComp(hwc_context_t *ctx, hwc_layer_1_t* layer) {
    private_handle_t *hnd = (private_handle_t *)layer->handle;
    if((not isYuvBuffer(hnd) and has90Transform(layer)) or
        (not isValidDimension(ctx,layer))
        //More conditions here, SKIP, sRGB+Blend etc
        ) {
        return false;
    }
    return true;
}

bool MDPComp::isValidDimension(hwc_context_t *ctx, hwc_layer_1_t *layer) {
    const int dpy = HWC_DISPLAY_PRIMARY;
    private_handle_t *hnd = (private_handle_t *)layer->handle;

    if(!hnd) {
        if (layer->flags & HWC_COLOR_FILL) {
            // Color layer
            return true;
        }
        ALOGE("%s: layer handle is NULL", __FUNCTION__);
        return false;
    }

    //XXX: Investigate doing this with pixel phase on MDSS
    if(!isSecureBuffer(hnd) && isNonIntegralSourceCrop(layer->sourceCropf))
        return false;

    int hw_w = ctx->dpyAttr[mDpy].xres;
    int hw_h = ctx->dpyAttr[mDpy].yres;

    hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
    hwc_rect_t dst = layer->displayFrame;
    int crop_w = crop.right - crop.left;
    int crop_h = crop.bottom - crop.top;
    int dst_w = dst.right - dst.left;
    int dst_h = dst.bottom - dst.top;
    float w_scale = ((float)crop_w / (float)dst_w);
    float h_scale = ((float)crop_h / (float)dst_h);

    /* Workaround for MDP HW limitation in DSI command mode panels where
     * FPS will not go beyond 30 if buffers on RGB pipes are of width or height
     * less than 5 pixels.
     * There is also a HW limitation in MDP: the minimum block size is 2x2.
     * Fallback to GPU if height is less than 2.
     */
    if((crop_w < 5)||(crop_h < 5))
        return false;

    if((w_scale > 1.0f) || (h_scale > 1.0f)) {
        const uint32_t downscale =
            qdutils::MDPVersion::getInstance().getMaxMDPDownscale();
        const float w_dscale = w_scale;
        const float h_dscale = h_scale;

        if(ctx->mMDP.version >= qdutils::MDSS_V5) {
            /* Workaround for downscales larger than 4x.
             * Will be removed once decimator block is enabled for MDSS
             */
            if(!qdutils::MDPVersion::getInstance().supportsDecimation()) {
                if(crop_w > MAX_DISPLAY_DIM || w_dscale > downscale ||
                   h_dscale > downscale)
                    return false;
            } else {
                if(w_dscale > 64 || h_dscale > 64)
                    return false;
            }
        } else { //A-family
            if(w_dscale > downscale || h_dscale > downscale)
                return false;
        }
    }

    if((w_scale < 1.0f) || (h_scale < 1.0f)) {
        const uint32_t upscale =
            qdutils::MDPVersion::getInstance().getMaxMDPUpscale();
        const float w_uscale = 1.0f / w_scale;
        const float h_uscale = 1.0f / h_scale;

        if(w_uscale > upscale || h_uscale > upscale)
            return false;
    }

    return true;
}

ovutils::eDest MDPComp::getMdpPipe(hwc_context_t *ctx, ePipeType type,
        int mixer) {
    overlay::Overlay& ov = *ctx->mOverlay;
    ovutils::eDest mdp_pipe = ovutils::OV_INVALID;

    switch(type) {
    case MDPCOMP_OV_DMA:
        mdp_pipe = ov.nextPipe(ovutils::OV_MDP_PIPE_DMA, mDpy, mixer);
        if(mdp_pipe != ovutils::OV_INVALID) {
            return mdp_pipe;
        }
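        // No free DMA pipe: fall through and try an RGB pipe next.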
    case MDPCOMP_OV_ANY:
    case MDPCOMP_OV_RGB:
        mdp_pipe = ov.nextPipe(ovutils::OV_MDP_PIPE_RGB, mDpy, mixer);
        if(mdp_pipe != ovutils::OV_INVALID) {
            return mdp_pipe;
        }

        if(type == MDPCOMP_OV_RGB) {
            //Requested only for RGB pipe
            break;
        }
    case MDPCOMP_OV_VG:
        return ov.nextPipe(ovutils::OV_MDP_PIPE_VG, mDpy, mixer);
    default:
        ALOGE("%s: Invalid pipe type",__FUNCTION__);
        return ovutils::OV_INVALID;
    };
    return ovutils::OV_INVALID;
}

bool MDPComp::isFrameDoable(hwc_context_t *ctx) {
    bool ret = true;
    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    if(!isEnabled()) {
        ALOGD_IF(isDebug(),"%s: MDP Comp. not enabled.", __FUNCTION__);
        ret = false;
    } else if(qdutils::MDPVersion::getInstance().is8x26() &&
              ctx->mVideoTransFlag &&
              isSecondaryConnected(ctx)) {
        //1 Padding round to shift pipes across mixers
        ALOGD_IF(isDebug(),"%s: MDP Comp. video transition padding round",
                 __FUNCTION__);
        ret = false;
    } else if(isSecondaryConfiguring(ctx)) {
        ALOGD_IF( isDebug(),"%s: External Display connection is pending",
                  __FUNCTION__);
        ret = false;
    } else if(ctx->isPaddingRound) {
        ctx->isPaddingRound = false;
        ALOGD_IF(isDebug(), "%s: padding round",__FUNCTION__);
        ret = false;
    }
    return ret;
}

/*
 * 1) Identify layers that are not visible in the updating ROI and drop them
 * from composition.
 * 2) If there is a scaling layer which needs cropping against the generated
 * ROI, reset the ROI to full resolution.
 */
bool MDPComp::validateAndApplyROI(hwc_context_t *ctx,
        hwc_display_contents_1_t* list, hwc_rect_t roi) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    if(!isValidRect(roi))
        return false;

    hwc_rect_t visibleRect = roi;

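    /* Walk the layers top-down (highest z first): each opaque layer deducts
     * its on-screen region from visibleRect, so lower layers that end up fully
     * outside the remaining visible area (or outside the ROI) are dropped from
     * composition. */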
    for(int i = numAppLayers - 1; i >= 0; i--){

        if(!isValidRect(visibleRect)) {
            mCurrentFrame.drop[i] = true;
            mCurrentFrame.dropCount++;
            continue;
        }

        const hwc_layer_1_t* layer = &list->hwLayers[i];

        hwc_rect_t dstRect = layer->displayFrame;
        hwc_rect_t srcRect = integerizeSourceCrop(layer->sourceCropf);
        int transform = layer->transform;

        hwc_rect_t res = getIntersection(visibleRect, dstRect);

        int res_w = res.right - res.left;
        int res_h = res.bottom - res.top;
        int dst_w = dstRect.right - dstRect.left;
        int dst_h = dstRect.bottom - dstRect.top;

        if(!isValidRect(res)) {
            mCurrentFrame.drop[i] = true;
            mCurrentFrame.dropCount++;
        }else {
            /* Reset frame ROI when any layer which needs scaling also needs ROI
             * cropping */
            if((res_w != dst_w || res_h != dst_h) && needsScaling (layer)) {
                ALOGI("%s: Resetting ROI due to scaling", __FUNCTION__);
                memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
                mCurrentFrame.dropCount = 0;
                return false;
            }

            /* deduct any opaque region from visibleRect */
            if (layer->blending == HWC_BLENDING_NONE)
                visibleRect = deductRect(visibleRect, res);
        }
    }
    return true;
}

void MDPComp::generateROI(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    if(!sEnablePartialFrameUpdate) {
        return;
    }

    if(mDpy || isDisplaySplit(ctx, mDpy)){
        ALOGE_IF(isDebug(), "%s: ROI not supported for"
                 "the (1) external / virtual display's (2) dual DSI displays",
                 __FUNCTION__);
        return;
    }

    if(isSkipPresent(ctx, mDpy))
        return;

    if(list->flags & HWC_GEOMETRY_CHANGED)
        return;

    struct hwc_rect roi = (struct hwc_rect){0, 0, 0, 0};
    for(int index = 0; index < numAppLayers; index++ ) {
        if ((mCachedFrame.hnd[index] != list->hwLayers[index].handle) ||
            isYuvBuffer((private_handle_t *)list->hwLayers[index].handle)) {
            hwc_rect_t dstRect = list->hwLayers[index].displayFrame;
            hwc_rect_t srcRect = integerizeSourceCrop(
                    list->hwLayers[index].sourceCropf);
            int transform = list->hwLayers[index].transform;

            /* Intersect against display boundaries */
            roi = getUnion(roi, dstRect);
        }
    }

    if(!validateAndApplyROI(ctx, list, roi)){
        roi = (struct hwc_rect) {0, 0,
                (int)ctx->dpyAttr[mDpy].xres, (int)ctx->dpyAttr[mDpy].yres};
    }

    ctx->listStats[mDpy].roi.x = roi.left;
    ctx->listStats[mDpy].roi.y = roi.top;
    ctx->listStats[mDpy].roi.w = roi.right - roi.left;
    ctx->listStats[mDpy].roi.h = roi.bottom - roi.top;

    ALOGD_IF(isDebug(),"%s: generated ROI: [%d, %d, %d, %d]", __FUNCTION__,
             roi.left, roi.top, roi.right, roi.bottom);
}

/* Checks for conditions where all the layers marked for MDP comp cannot be
 * bypassed. On such conditions we try to bypass at least the YUV layers */
bool MDPComp::tryFullFrame(hwc_context_t *ctx,
                           hwc_display_contents_1_t* list){

    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    int priDispW = ctx->dpyAttr[HWC_DISPLAY_PRIMARY].xres;

    if(sIdleFallBack && !ctx->listStats[mDpy].secureUI) {
        ALOGD_IF(isDebug(), "%s: Idle fallback dpy %d",__FUNCTION__, mDpy);
        return false;
    }

    if(isSkipPresent(ctx, mDpy)) {
        ALOGD_IF(isDebug(),"%s: SKIP present: %d",
                 __FUNCTION__,
                 isSkipPresent(ctx, mDpy));
        return false;
    }

    if(mDpy > HWC_DISPLAY_PRIMARY && (priDispW > MAX_DISPLAY_DIM) &&
       (ctx->dpyAttr[mDpy].xres < MAX_DISPLAY_DIM)) {
        // Disable MDP comp on the secondary when the primary is a high-res
        // panel and the secondary is a normal 1080p: in such a usecase
        // decimation gets used for the downscale on the secondary and there
        // will be a quality mismatch whenever it falls back to GPU comp.
        ALOGD_IF(isDebug(), "%s: Disable MDP Composition for Secondary Disp",
                 __FUNCTION__);
        return false;
    }

    // check for action safe flag and downscale mode which requires scaling.
    if(ctx->dpyAttr[mDpy].mActionSafePresent
       || ctx->dpyAttr[mDpy].mDownScaleMode) {
        ALOGD_IF(isDebug(), "%s: Scaling needed for this frame",__FUNCTION__);
        return false;
    }

    for(int i = 0; i < numAppLayers; ++i) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        private_handle_t *hnd = (private_handle_t *)layer->handle;

        if(isYuvBuffer(hnd) && has90Transform(layer)) {
            if(!canUseRotator(ctx, mDpy)) {
                ALOGD_IF(isDebug(), "%s: Can't use rotator for dpy %d",
                         __FUNCTION__, mDpy);
                return false;
            }
        }

        //For 8x26 with panel width>1k, if RGB layer needs HFLIP fail mdp comp
        // may not need it if Gfx pre-rotation can handle all flips & rotations
        if(qdutils::MDPVersion::getInstance().is8x26() &&
           (ctx->dpyAttr[mDpy].xres > 1024) &&
           (layer->transform & HWC_TRANSFORM_FLIP_H) &&
           (!isYuvBuffer(hnd)))
            return false;
    }

    if(ctx->mAD->isDoable()) {
        return false;
    }

    //If all above hard conditions are met we can do full or partial MDP comp.
    bool ret = false;
    if(fullMDPComp(ctx, list)) {
        ret = true;
    } else if(partialMDPComp(ctx, list)) {
        ret = true;
    }

    return ret;
}

bool MDPComp::fullMDPComp(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
    //Will benefit presentation / secondary-only layer.
    if((mDpy > HWC_DISPLAY_PRIMARY) &&
       (list->numHwLayers - 1) > MAX_SEC_LAYERS) {
        ALOGD_IF(isDebug(), "%s: Exceeds max secondary pipes",__FUNCTION__);
        return false;
    }

    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    for(int i = 0; i < numAppLayers; i++) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        if(not isSupportedForMDPComp(ctx, layer)) {
            ALOGD_IF(isDebug(), "%s: Unsupported layer in list",__FUNCTION__);
            return false;
        }

        //For 8x26, if there is only one layer which needs scale for secondary
        //while no scale for primary display, DMA pipe is occupied by primary.
        //If need to fall back to GLES composition, virtual display lacks DMA
        //pipe and error is reported.
        if(qdutils::MDPVersion::getInstance().is8x26() &&
           mDpy >= HWC_DISPLAY_EXTERNAL &&
           qhwc::needsScaling(layer))
            return false;
    }

    mCurrentFrame.fbCount = 0;
    memcpy(&mCurrentFrame.isFBComposed, &mCurrentFrame.drop,
           sizeof(mCurrentFrame.isFBComposed));
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount -
        mCurrentFrame.dropCount;

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

bool MDPComp::partialMDPComp(hwc_context_t *ctx, hwc_display_contents_1_t* list)
{
    if(!sEnableMixedMode) {
        //Mixed mode is disabled. No need to even try caching.
        return false;
    }

    bool ret = false;
    if(list->flags & HWC_GEOMETRY_CHANGED) { //Try load based first
        ret = loadBasedCompPreferGPU(ctx, list) or
                loadBasedCompPreferMDP(ctx, list) or
                cacheBasedComp(ctx, list);
    } else {
        ret = cacheBasedComp(ctx, list) or
                loadBasedCompPreferGPU(ctx, list) or
                loadBasedCompPreferMDP(ctx, list);
    }

    return ret;
}

bool MDPComp::cacheBasedComp(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numAppLayers);
    updateLayerCache(ctx, list);

    //If an MDP marked layer is unsupported cannot do partial MDP Comp
    for(int i = 0; i < numAppLayers; i++) {
        if(!mCurrentFrame.isFBComposed[i]) {
            hwc_layer_1_t* layer = &list->hwLayers[i];
            if(not isSupportedForMDPComp(ctx, layer)) {
                ALOGD_IF(isDebug(), "%s: Unsupported layer in list",
                         __FUNCTION__);
                reset(ctx);
                return false;
            }
        }
    }

    updateYUV(ctx, list, false /*secure only*/);
    bool ret = markLayersForCaching(ctx, list); //sets up fbZ also
    if(!ret) {
        ALOGD_IF(isDebug(),"%s: batching failed, dpy %d",__FUNCTION__, mDpy);
        reset(ctx);
        return false;
    }

    int mdpCount = mCurrentFrame.mdpCount;

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    //Will benefit cases where a video has non-updating background.
    if((mDpy > HWC_DISPLAY_PRIMARY) and
       (mdpCount > MAX_SEC_LAYERS)) {
        ALOGD_IF(isDebug(), "%s: Exceeds max secondary pipes",__FUNCTION__);
        reset(ctx);
        return false;
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

bool MDPComp::loadBasedCompPreferGPU(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    if(not isLoadBasedCompDoable(ctx, list)) {
        return false;
    }

    int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numAppLayers);

    int stagesForMDP = min(sMaxPipesPerMixer, ctx->mOverlay->availablePipes(
            mDpy, Overlay::MIXER_DEFAULT));
    //If MDP has X possible stages, it can take X layers.
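    //One of those stages is taken by the FB target itself, so at most
    //(stagesForMDP - 1) app layers can go to MDP; the remaining layers have to
    //be batched for GPU (FB) composition, which is what batchSize counts.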
    const int batchSize = (numAppLayers - mCurrentFrame.dropCount) -
                (stagesForMDP - 1); //1 for FB

    if(batchSize <= 0) {
        ALOGD_IF(isDebug(), "%s: Not attempting", __FUNCTION__);
        return false;
    }

    int minBatchStart = -1;
    int minBatchEnd = -1;
    size_t minBatchPixelCount = SIZE_MAX;

    /* Iterate through the layer list to find a contiguous batch of batchSize
     * non-dropped layers with the lowest pixel count */
    for(int i = 0; i <= (numAppLayers - batchSize); i++) {
        if(mCurrentFrame.drop[i])
            continue;

        int batchCount = batchSize;
        uint32_t batchPixelCount = 0;
        int j = i;
        for(; j < numAppLayers && batchCount; j++){
            if(!mCurrentFrame.drop[j]) {
                hwc_layer_1_t* layer = &list->hwLayers[j];
                hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
                hwc_rect_t dst = layer->displayFrame;

                /* If we have a valid ROI, count pixels only for the MDP fetched
                 * region of the buffer */
                if((ctx->listStats[mDpy].roi.w != ctx->dpyAttr[mDpy].xres) ||
                   (ctx->listStats[mDpy].roi.h != ctx->dpyAttr[mDpy].yres)) {
                    hwc_rect_t roi;
                    roi.left = ctx->listStats[mDpy].roi.x;
                    roi.top = ctx->listStats[mDpy].roi.y;
                    roi.right = roi.left + ctx->listStats[mDpy].roi.w;
                    roi.bottom = roi.top + ctx->listStats[mDpy].roi.h;

                    /* valid ROI means no scaling layer is composed. So check
                     * only intersection to find actual fetched pixels */
                    crop = getIntersection(roi, dst);
                }

                batchPixelCount += (crop.right - crop.left) *
                    (crop.bottom - crop.top);
                batchCount--;
            }
        }

        /* we dont want to program any batch of size lesser than batchSize */
        if(!batchCount && (batchPixelCount < minBatchPixelCount)) {
            minBatchPixelCount = batchPixelCount;
            minBatchStart = i;
            minBatchEnd = j-1;
        }
    }

    if(minBatchStart < 0) {
        ALOGD_IF(isDebug(), "%s: No batch found batchSize %d numAppLayers %d",
                 __FUNCTION__, batchSize, numAppLayers);
        return false;
    }

    /* non-dropped layers falling outside the selected batch will be marked for
     * MDP */
    for(int i = 0; i < numAppLayers; i++) {
        if((i < minBatchStart || i > minBatchEnd) && !mCurrentFrame.drop[i] ) {
            hwc_layer_1_t* layer = &list->hwLayers[i];
            if(not isSupportedForMDPComp(ctx, layer)) {
                ALOGD_IF(isDebug(), "%s: MDP unsupported layer found at %d",
                         __FUNCTION__, i);
                reset(ctx);
                return false;
            }
            mCurrentFrame.isFBComposed[i] = false;
        }
    }

    mCurrentFrame.fbZ = minBatchStart;
    mCurrentFrame.fbCount = batchSize;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount -
            mCurrentFrame.dropCount;

    ALOGD_IF(isDebug(), "%s: fbZ %d batchSize %d fbStart: %d fbEnd: %d",
             __FUNCTION__, mCurrentFrame.fbZ, batchSize, minBatchStart,
             minBatchEnd);

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

bool MDPComp::loadBasedCompPreferMDP(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    if(not isLoadBasedCompDoable(ctx, list)) {
        return false;
    }

    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numAppLayers);

    //Full screen is from ib perspective, not actual full screen
    const int bpp = 4;
    double panelRefRate =
            1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;

    double bwLeft = sMaxBw - sBwClaimed;

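    /* Rough b/w estimate: xres * yres * bpp * panelRefRate is the cost in
     * bytes/sec of fetching one full-screen RGBA layer every frame, so
     * fullScreenLayers approximates how many such layers fit in the remaining
     * budget (bwLeft assumed to be in GBps, hence the 1e9 factor). */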
    const int fullScreenLayers = bwLeft * 1000000000 / (ctx->dpyAttr[mDpy].xres
            * ctx->dpyAttr[mDpy].yres * bpp * panelRefRate);

    const int fbBatchSize = (numAppLayers - mCurrentFrame.dropCount)
            - (fullScreenLayers - 1);

    //If batch size is not at least 2, we aren't really preferring MDP, since
    //only 1 layer going to GPU could actually translate into an entire FB
    //needed to be fetched by MDP, thus needing more b/w rather than less.
    if(fbBatchSize < 2 || fbBatchSize > numAppLayers) {
        ALOGD_IF(isDebug(), "%s: Not attempting", __FUNCTION__);
        return false;
    }

    //Find top fbBatchSize non-dropped layers to get your batch
    int fbStart = -1, fbEnd = -1, batchCount = fbBatchSize;
    for(int i = numAppLayers - 1; i >= 0; i--) {
        if(mCurrentFrame.drop[i])
            continue;

        if(fbEnd < 0)
            fbEnd = i;

        if(!(--batchCount)) {
            fbStart = i;
            break;
        }
    }

    //Bottom layers constitute MDP batch
    for(int i = 0; i < fbStart; i++) {
        if((i < fbStart || i > fbEnd) && !mCurrentFrame.drop[i] ) {
            hwc_layer_1_t* layer = &list->hwLayers[i];
            if(not isSupportedForMDPComp(ctx, layer)) {
                ALOGD_IF(isDebug(), "%s: MDP unsupported layer found at %d",
                         __FUNCTION__, i);
                reset(ctx);
                return false;
            }
            mCurrentFrame.isFBComposed[i] = false;
        }
    }

    mCurrentFrame.fbZ = fbStart;
    mCurrentFrame.fbCount = fbBatchSize;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount
            - mCurrentFrame.dropCount;

    ALOGD_IF(isDebug(), "%s: FB Z %d, app layers %d, non-dropped layers: %d, "
             "MDP Batch Size %d",__FUNCTION__, mCurrentFrame.fbZ, numAppLayers,
             numAppLayers - mCurrentFrame.dropCount, mCurrentFrame.mdpCount);

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

bool MDPComp::isLoadBasedCompDoable(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    if(mDpy or isSecurePresent(ctx, mDpy) or
       isYuvPresent(ctx, mDpy)) {
        return false;
    }
    return true;
}

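/* Try MDP composition of just the video (YUV) layers, leaving everything else
 * on the FB: first considering all video layers, then retrying with only the
 * secure ones if that fails. */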
bool MDPComp::tryVideoOnly(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    const bool secureOnly = true;
    return videoOnlyComp(ctx, list, not secureOnly) or
           videoOnlyComp(ctx, list, secureOnly);
}

bool MDPComp::videoOnlyComp(hwc_context_t *ctx,
        hwc_display_contents_1_t* list, bool secureOnly) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    mCurrentFrame.reset(numAppLayers);
    updateYUV(ctx, list, secureOnly);
    int mdpCount = mCurrentFrame.mdpCount;

    if(!isYuvPresent(ctx, mDpy) or (mdpCount == 0)) {
        reset(ctx);
        return false;
    }

    /* Bail out if we are processing only secured video layers
     * and we dont have any */
    if(!isSecurePresent(ctx, mDpy) && secureOnly){
        reset(ctx);
        return false;
    }

    if(mCurrentFrame.fbCount)
        mCurrentFrame.fbZ = mCurrentFrame.mdpCount;

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

/* Checks for conditions where YUV layers cannot be bypassed */
bool MDPComp::isYUVDoable(hwc_context_t* ctx, hwc_layer_1_t* layer) {
    if(isSkipLayer(layer)) {
        ALOGD_IF(isDebug(), "%s: Video marked SKIP dpy %d", __FUNCTION__, mDpy);
        return false;
    }

    if(layer->transform & HWC_TRANSFORM_ROT_90 && !canUseRotator(ctx,mDpy)) {
        ALOGD_IF(isDebug(), "%s: no free DMA pipe",__FUNCTION__);
        return false;
    }

    if(isSecuring(ctx, layer)) {
        ALOGD_IF(isDebug(), "%s: MDP securing is active", __FUNCTION__);
        return false;
    }

    if(!isValidDimension(ctx, layer)) {
        ALOGD_IF(isDebug(), "%s: Buffer is of invalid width",
            __FUNCTION__);
        return false;
    }

    if(layer->planeAlpha < 0xFF) {
        ALOGD_IF(isDebug(), "%s: Cannot handle YUV layer with plane alpha\
                 in video only mode",
                 __FUNCTION__);
        return false;
    }

    return true;
}

/* Starts at fromIndex and checks each cached layer up to toIndex for overlap
 * with any updating layer above it in z-order till the end of the batch.
 * Returns false on the first such intersection, true otherwise (i.e. when the
 * batch can safely be pushed to a higher z-order). */
bool MDPComp::canPushBatchToTop(const hwc_display_contents_1_t* list,
        int fromIndex, int toIndex) {
    for(int i = fromIndex; i < toIndex; i++) {
        if(mCurrentFrame.isFBComposed[i] && !mCurrentFrame.drop[i]) {
            if(intersectingUpdatingLayers(list, i+1, toIndex, i)) {
                return false;
            }
        }
    }
    return true;
}

/* Checks if the layer at targetLayerIndex has any intersection with the
 * updating layers in between fromIndex and toIndex. Returns true if an
 * intersection is found */
bool MDPComp::intersectingUpdatingLayers(const hwc_display_contents_1_t* list,
        int fromIndex, int toIndex, int targetLayerIndex) {
    for(int i = fromIndex; i <= toIndex; i++) {
        if(!mCurrentFrame.isFBComposed[i]) {
            if(areLayersIntersecting(&list->hwLayers[i],
                        &list->hwLayers[targetLayerIndex])) {
                return true;
            }
        }
    }
    return false;
}

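/* Finds the largest contiguous batch of cached (FB-composed, non-dropped)
 * layers, optionally pulling the batch above intermediate updating layers when
 * no overlap forbids it. Outputs the batch bounds/size through the reference
 * arguments and returns the z-order the FB target should take for that batch
 * (or -1 if no batch was found). */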
int MDPComp::getBatch(hwc_display_contents_1_t* list,
        int& maxBatchStart, int& maxBatchEnd,
        int& maxBatchCount) {
    int i = 0;
    int fbZOrder =-1;
    int droppedLayerCt = 0;
    while (i < mCurrentFrame.layerCount) {
        int batchCount = 0;
        int batchStart = i;
        int batchEnd = i;
        /* Adjust batch Z order with the dropped layers so far */
        int fbZ = batchStart - droppedLayerCt;
        int firstZReverseIndex = -1;
        int updatingLayersAbove = 0;//Updating layer count in middle of batch
        while(i < mCurrentFrame.layerCount) {
            if(!mCurrentFrame.isFBComposed[i]) {
                if(!batchCount) {
                    i++;
                    break;
                }
                updatingLayersAbove++;
                i++;
                continue;
            } else {
                if(mCurrentFrame.drop[i]) {
                    i++;
                    droppedLayerCt++;
                    continue;
                } else if(updatingLayersAbove <= 0) {
                    batchCount++;
                    batchEnd = i;
                    i++;
                    continue;
                } else { //Layer is FBComposed, not a drop & updatingLayer > 0

                    // We have a valid updating layer already. If layer-i does
                    // not overlap with any of the updating layers in between
                    // batch-start and i, then we can add layer i to the batch.
                    if(!intersectingUpdatingLayers(list, batchStart, i-1, i)) {
                        batchCount++;
                        batchEnd = i;
                        i++;
                        continue;
                    } else if(canPushBatchToTop(list, batchStart, i)) {
                        //If none of the non-updating layers within this batch
                        //intersect the updating layers above them in z-order,
                        //then we can safely move the batch to a higher
                        //z-order. Increment fbZ as it is moving up.
1085 if( firstZReverseIndex < 0) {
1086 firstZReverseIndex = i;
1087 }
1088 batchCount++;
1089 batchEnd = i;
1090 fbZ += updatingLayersAbove;
1091 i++;
1092 updatingLayersAbove = 0;
1093 continue;
1094 } else {
1095 //both failed.start the loop again from here.
1096 if(firstZReverseIndex >= 0) {
1097 i = firstZReverseIndex;
1098 }
1099 break;
1100 }
1101 }
1102 }
1103 }
1104 if(batchCount > maxBatchCount) {
1105 maxBatchCount = batchCount;
1106 maxBatchStart = batchStart;
1107 maxBatchEnd = batchEnd;
1108 fbZOrder = fbZ;
1109 }
1110 }
1111 return fbZOrder;
1112}
1113
1114bool MDPComp::markLayersForCaching(hwc_context_t* ctx,
1115 hwc_display_contents_1_t* list) {
1116 /* Idea is to keep as many non-updating(cached) layers in FB and
1117 * send rest of them through MDP. This is done in 2 steps.
1118 * 1. Find the maximum contiguous batch of non-updating layers.
1119 * 2. See if we can improve this batch size for caching by adding
1120 * opaque layers around the batch, if they don't have
1121 * any overlapping with the updating layers in between.
1122 * NEVER mark an updating layer for caching.
1123 * But cached ones can be marked for MDP */
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001124
1125 int maxBatchStart = -1;
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001126 int maxBatchEnd = -1;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001127 int maxBatchCount = 0;
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301128 int fbZ = -1;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001129
Saurabh Shahd53bc5f2014-02-05 10:17:43 -08001130 /* Nothing is cached. No batching needed */
1131 if(mCurrentFrame.fbCount == 0) {
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001132 return true;
Saurabh Shahaa236822013-04-24 18:07:26 -07001133 }
Saurabh Shahd53bc5f2014-02-05 10:17:43 -08001134
1135 /* No MDP comp layers, try to use other comp modes */
1136 if(mCurrentFrame.mdpCount == 0) {
1137 return false;
Saurabh Shahaa236822013-04-24 18:07:26 -07001138 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001139
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301140 fbZ = getBatch(list, maxBatchStart, maxBatchEnd, maxBatchCount);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001141
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301142 /* reset rest of the layers lying inside ROI for MDP comp */
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001143 for(int i = 0; i < mCurrentFrame.layerCount; i++) {
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001144 hwc_layer_1_t* layer = &list->hwLayers[i];
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001145 if((i < maxBatchStart || i > maxBatchEnd) &&
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301146 mCurrentFrame.isFBComposed[i]){
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001147 if(!mCurrentFrame.drop[i]){
1148 //If an unsupported layer is being attempted to
1149 //be pulled out we should fail
1150 if(not isSupportedForMDPComp(ctx, layer)) {
1151 return false;
1152 }
1153 mCurrentFrame.isFBComposed[i] = false;
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001154 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001155 }
1156 }
1157
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301158 // update the frame data
1159 mCurrentFrame.fbZ = fbZ;
1160 mCurrentFrame.fbCount = maxBatchCount;
Saurabh Shahaa236822013-04-24 18:07:26 -07001161 mCurrentFrame.mdpCount = mCurrentFrame.layerCount -
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001162 mCurrentFrame.fbCount - mCurrentFrame.dropCount;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001163
1164 ALOGD_IF(isDebug(),"%s: cached count: %d",__FUNCTION__,
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301165 mCurrentFrame.fbCount);
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001166
1167 return true;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001168}
Saurabh Shah85234ec2013-04-12 17:09:00 -07001169
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001170void MDPComp::updateLayerCache(hwc_context_t* ctx,
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001171 hwc_display_contents_1_t* list) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001172 int numAppLayers = ctx->listStats[mDpy].numAppLayers;
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001173 int fbCount = 0;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001174
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001175 for(int i = 0; i < numAppLayers; i++) {
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001176 hwc_layer_1_t* layer = &list->hwLayers[i];
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001177 if (mCachedFrame.hnd[i] == list->hwLayers[i].handle) {
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001178 if(!mCurrentFrame.drop[i])
1179 fbCount++;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001180 mCurrentFrame.isFBComposed[i] = true;
1181 } else {
Saurabh Shahaa236822013-04-24 18:07:26 -07001182 mCurrentFrame.isFBComposed[i] = false;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001183 }
1184 }
Saurabh Shahaa236822013-04-24 18:07:26 -07001185
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001186 mCurrentFrame.fbCount = fbCount;
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001187 mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount
1188 - mCurrentFrame.dropCount;
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001189
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001190 ALOGD_IF(isDebug(),"%s: MDP count: %d FB count %d drop count: %d"
1191 ,__FUNCTION__, mCurrentFrame.mdpCount, mCurrentFrame.fbCount,
1192 mCurrentFrame.dropCount);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001193}
1194
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001195void MDPComp::updateYUV(hwc_context_t* ctx, hwc_display_contents_1_t* list,
1196 bool secureOnly) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001197 int nYuvCount = ctx->listStats[mDpy].yuvCount;
1198 for(int index = 0;index < nYuvCount; index++){
1199 int nYuvIndex = ctx->listStats[mDpy].yuvIndices[index];
1200 hwc_layer_1_t* layer = &list->hwLayers[nYuvIndex];
1201
1202 if(!isYUVDoable(ctx, layer)) {
1203 if(!mCurrentFrame.isFBComposed[nYuvIndex]) {
1204 mCurrentFrame.isFBComposed[nYuvIndex] = true;
1205 mCurrentFrame.fbCount++;
1206 }
1207 } else {
1208 if(mCurrentFrame.isFBComposed[nYuvIndex]) {
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001209 private_handle_t *hnd = (private_handle_t *)layer->handle;
1210 if(!secureOnly || isSecureBuffer(hnd)) {
1211 mCurrentFrame.isFBComposed[nYuvIndex] = false;
1212 mCurrentFrame.fbCount--;
1213 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001214 }
1215 }
1216 }
Saurabh Shahaa236822013-04-24 18:07:26 -07001217
1218 mCurrentFrame.mdpCount = mCurrentFrame.layerCount -
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001219 mCurrentFrame.fbCount - mCurrentFrame.dropCount;
1220 ALOGD_IF(isDebug(),"%s: fb count: %d",__FUNCTION__,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001221 mCurrentFrame.fbCount);
1222}
1223
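/*
 * Runs after the composition strategy is chosen: verifies pipe and bandwidth
 * resources and hardware limitations, configures the FB target (if any layers
 * remain on the GPU), allocates MDP pipes and assigns z-orders, splitting
 * 4k2k YUV layers across two VG pipes when that path is enabled.
 */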
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001224bool MDPComp::postHeuristicsHandling(hwc_context_t *ctx,
1225 hwc_display_contents_1_t* list) {
1226
1227 //Capability checks
1228 if(!resourceCheck(ctx, list)) {
1229 ALOGD_IF(isDebug(), "%s: resource check failed", __FUNCTION__);
1230 return false;
1231 }
1232
1233 //Limitations checks
1234 if(!hwLimitationsCheck(ctx, list)) {
1235 ALOGD_IF(isDebug(), "%s: HW limitations",__FUNCTION__);
1236 return false;
1237 }
1238
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001239 //Configure framebuffer first if applicable
1240 if(mCurrentFrame.fbZ >= 0) {
1241 if(!ctx->mFBUpdate[mDpy]->prepare(ctx, list, mCurrentFrame.fbZ)) {
1242 ALOGD_IF(isDebug(), "%s configure framebuffer failed",
1243 __FUNCTION__);
1244 return false;
1245 }
1246 }
1247
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001248 mCurrentFrame.map();
1249
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001250 if(!allocLayerPipes(ctx, list)) {
1251 ALOGD_IF(isDebug(), "%s: Unable to allocate MDP pipes", __FUNCTION__);
Saurabh Shahaa236822013-04-24 18:07:26 -07001252 return false;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001253 }
1254
1255 for (int index = 0, mdpNextZOrder = 0; index < mCurrentFrame.layerCount;
Saurabh Shahaa236822013-04-24 18:07:26 -07001256 index++) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001257 if(!mCurrentFrame.isFBComposed[index]) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001258 int mdpIndex = mCurrentFrame.layerToMDP[index];
1259 hwc_layer_1_t* layer = &list->hwLayers[index];
1260
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301261 //Leave fbZ for framebuffer. CACHE/GLES layers go here.
1262 if(mdpNextZOrder == mCurrentFrame.fbZ) {
1263 mdpNextZOrder++;
1264 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001265 MdpPipeInfo* cur_pipe = mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
1266 cur_pipe->zOrder = mdpNextZOrder++;
1267
radhakrishnac9a67412013-09-25 17:40:42 +05301268 private_handle_t *hnd = (private_handle_t *)layer->handle;
1269 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1270 if(configure4k2kYuv(ctx, layer,
1271 mCurrentFrame.mdpToLayer[mdpIndex])
1272 != 0 ){
1273 ALOGD_IF(isDebug(), "%s: Failed to configure split pipes \
1274 for layer %d",__FUNCTION__, index);
1275 return false;
1276 }
1277 else{
1278 mdpNextZOrder++;
1279 }
1280 continue;
1281 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001282 if(configure(ctx, layer, mCurrentFrame.mdpToLayer[mdpIndex]) != 0 ){
1283 ALOGD_IF(isDebug(), "%s: Failed to configure overlay for \
radhakrishnac9a67412013-09-25 17:40:42 +05301284 layer %d",__FUNCTION__, index);
Saurabh Shahaa236822013-04-24 18:07:26 -07001285 return false;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001286 }
Saurabh Shahaa236822013-04-24 18:07:26 -07001287 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001288 }
1289
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001290 setRedraw(ctx, list);
Saurabh Shahaa236822013-04-24 18:07:26 -07001291 return true;
1292}
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001293
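/*
 * Checks that the frame fits within the per-mixer pipe limit, that enough
 * MDP/VG pipes are free, and that the estimated read bandwidth is within
 * the budget advertised by the target.
 */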
Saurabh Shah173f4242013-11-20 09:50:12 -08001294bool MDPComp::resourceCheck(hwc_context_t *ctx,
1295 hwc_display_contents_1_t *list) {
1296 const bool fbUsed = mCurrentFrame.fbCount;
1297 if(mCurrentFrame.mdpCount > sMaxPipesPerMixer - fbUsed) {
1298 ALOGD_IF(isDebug(), "%s: Exceeds MAX_PIPES_PER_MIXER",__FUNCTION__);
1299 return false;
1300 }
1301
1302 if(!arePipesAvailable(ctx, list)) {
1303 return false;
1304 }
1305
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001306 double size = calcMDPBytesRead(ctx, list);
Saurabh Shah173f4242013-11-20 09:50:12 -08001307 if(!bandwidthCheck(ctx, size)) {
1308 ALOGD_IF(isDebug(), "%s: Exceeds bandwidth",__FUNCTION__);
1309 return false;
1310 }
1311
1312 return true;
1313}
1314
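/*
 * Estimates the bytes MDP must read per refresh, in GB: for each MDP layer,
 * bytes-per-pixel times the source crop area, scaled by the ratio of panel
 * height to destination height; the FB target is added as a full RGBA8888
 * surface. Returns 0 on targets that supply no bandwidth value.
 */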
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001315double MDPComp::calcMDPBytesRead(hwc_context_t *ctx,
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001316 hwc_display_contents_1_t* list) {
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001317 double size = 0;
1318 const double GIG = 1000000000.0;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001319
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001320 //Skip for targets where no device tree value for bw is supplied
1321 if(sMaxBw <= 0.0) {
1322 return 0.0;
1323 }
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001324
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001325 for (uint32_t i = 0; i < list->numHwLayers - 1; i++) {
1326 if(!mCurrentFrame.isFBComposed[i]) {
1327 hwc_layer_1_t* layer = &list->hwLayers[i];
1328 private_handle_t *hnd = (private_handle_t *)layer->handle;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001329 if (hnd) {
Saurabh Shah62e1d732013-09-17 10:44:05 -07001330 hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
Saurabh Shah90789162013-09-16 10:29:20 -07001331 hwc_rect_t dst = layer->displayFrame;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001332 float bpp = ((float)hnd->size) / (hnd->width * hnd->height);
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001333 size += (bpp * (crop.right - crop.left) *
1334 (crop.bottom - crop.top) *
1335 ctx->dpyAttr[mDpy].yres / (dst.bottom - dst.top)) /
1336 GIG;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001337 }
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001338 }
1339 }
1340
1341 if(mCurrentFrame.fbCount) {
1342 hwc_layer_1_t* layer = &list->hwLayers[list->numHwLayers - 1];
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001343 int tempw, temph;
1344 size += (getBufferSizeAndDimensions(
1345 layer->displayFrame.right - layer->displayFrame.left,
1346 layer->displayFrame.bottom - layer->displayFrame.top,
1347 HAL_PIXEL_FORMAT_RGBA_8888,
1348 tempw, temph)) / GIG;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001349 }
1350
1351 return size;
1352}
1353
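/*
 * Rejects the frame if the estimated read rate exceeds the remaining budget,
 * i.e. if size (GB) * panelRefreshRate (Hz) > sMaxBw - sBwClaimed, where
 * sBwClaimed is the bandwidth already claimed by earlier prepare() calls.
 * Skipped on targets that do not supply a bandwidth value.
 */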
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001354bool MDPComp::bandwidthCheck(hwc_context_t *ctx, const double& size) {
1355 //Skip for targets where no device tree value for bw is supplied
1356 if(sMaxBw <= 0.0) {
1357 return true;
1358 }
1359
1360 double panelRefRate =
1361 1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;
1362 if((size * panelRefRate) > (sMaxBw - sBwClaimed)) {
1363 return false;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001364 }
1365 return true;
1366}
1367
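/*
 * Hardware-specific rejections: pre-MDSS (A-family) targets cannot alpha
 * scale an MDP layer, and 8x26/8974v2 cannot blend two overlapping MDP
 * layers that both require downscaling, so such frames fall back to GPU.
 */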
Prabhanjan Kandula21918db2013-11-26 15:51:58 +05301368bool MDPComp::hwLimitationsCheck(hwc_context_t* ctx,
1369 hwc_display_contents_1_t* list) {
1370
1371 //A-family hw limitation:
1372    //If a layer needs alpha scaling, MDP cannot support it.
1373 if(ctx->mMDP.version < qdutils::MDSS_V5) {
1374 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1375 if(!mCurrentFrame.isFBComposed[i] &&
1376 isAlphaScaled( &list->hwLayers[i])) {
1377 ALOGD_IF(isDebug(), "%s:frame needs alphaScaling",__FUNCTION__);
1378 return false;
1379 }
1380 }
1381 }
1382
1383 // On 8x26 & 8974 hw, we have a limitation of downscaling+blending.
1384    //If multiple layers require downscaling and they overlap,
1385    //fall back to GPU since MDSS cannot handle it.
1386 if(qdutils::MDPVersion::getInstance().is8x74v2() ||
1387 qdutils::MDPVersion::getInstance().is8x26()) {
1388 for(int i = 0; i < mCurrentFrame.layerCount-1; ++i) {
1389 hwc_layer_1_t* botLayer = &list->hwLayers[i];
1390 if(!mCurrentFrame.isFBComposed[i] &&
1391 isDownscaleRequired(botLayer)) {
1392 //if layer-i is marked for MDP and needs downscaling
1393 //check if any MDP layer on top of i & overlaps with layer-i
1394 for(int j = i+1; j < mCurrentFrame.layerCount; ++j) {
1395 hwc_layer_1_t* topLayer = &list->hwLayers[j];
1396 if(!mCurrentFrame.isFBComposed[j] &&
1397 isDownscaleRequired(topLayer)) {
1398 hwc_rect_t r = getIntersection(botLayer->displayFrame,
1399 topLayer->displayFrame);
1400 if(isValidRect(r))
1401 return false;
1402 }
1403 }
1404 }
1405 }
1406 }
1407 return true;
1408}
1409
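/*
 * Entry point for a composition cycle on this display: resets per-frame
 * state, skips MDP during window animations, derives the bandwidth budget
 * from the panel/external-display state, then tries full-MDP and video-only
 * strategies before falling back to GPU. Returns 0 on success, -1 on
 * fallback.
 */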
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001410int MDPComp::prepare(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001411 int ret = 0;
Saurabh Shahaa236822013-04-24 18:07:26 -07001412 const int numLayers = ctx->listStats[mDpy].numAppLayers;
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001413 MDPVersion& mdpVersion = qdutils::MDPVersion::getInstance();
Ramkumar Radhakrishnanc5893f12013-06-06 19:43:53 -07001414
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001415    //If the number of app layers exceeds MAX_NUM_APP_LAYERS, fall back to GPU
1416    //and do not cache the information for the next draw cycle.
1417 if(numLayers > MAX_NUM_APP_LAYERS) {
1418 ALOGI("%s: Number of App layers exceeded the limit ",
1419 __FUNCTION__);
1420 mCachedFrame.reset();
1421 return -1;
1422 }
1423
Saurabh Shahb39f8152013-08-22 10:21:44 -07001424 //reset old data
1425 mCurrentFrame.reset(numLayers);
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001426 memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
1427 mCurrentFrame.dropCount = 0;
Prabhanjan Kandula088bd892013-07-02 23:47:13 +05301428
Ramkumar Radhakrishnana70981a2013-08-28 11:33:53 -07001429 // Detect the start of animation and fall back to GPU only once to cache
1430    // all the layers in FB and display FB content until animation completes.
1431 if(ctx->listStats[mDpy].isDisplayAnimating) {
1432 mCurrentFrame.needsRedraw = false;
1433 if(ctx->mAnimationState[mDpy] == ANIMATION_STOPPED) {
1434 mCurrentFrame.needsRedraw = true;
1435 ctx->mAnimationState[mDpy] = ANIMATION_STARTED;
1436 }
1437 setMDPCompLayerFlags(ctx, list);
1438 mCachedFrame.updateCounts(mCurrentFrame);
1439 ret = -1;
1440 return ret;
1441 } else {
1442 ctx->mAnimationState[mDpy] = ANIMATION_STOPPED;
1443 }
1444
Saurabh Shahb39f8152013-08-22 10:21:44 -07001445 //Hard conditions, if not met, cannot do MDP comp
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001446 if(isFrameDoable(ctx)) {
1447 generateROI(ctx, list);
Saurabh Shahb39f8152013-08-22 10:21:44 -07001448
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001449 //Convert from kbps to gbps
1450 sMaxBw = mdpVersion.getHighBw() / 1000000.0;
1451 if (ctx->mExtDisplay->isConnected() ||
1452 ctx->mMDP.panel != MIPI_CMD_PANEL) {
1453 sMaxBw = mdpVersion.getLowBw() / 1000000.0;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001454 }
1455
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001456 if(tryFullFrame(ctx, list) || tryVideoOnly(ctx, list)) {
1457 setMDPCompLayerFlags(ctx, list);
1458 } else {
1459 reset(ctx);
1460 memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
1461 mCurrentFrame.dropCount = 0;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001462 ret = -1;
Saurabh Shahb39f8152013-08-22 10:21:44 -07001463 }
1464 } else {
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001465 ALOGD_IF( isDebug(),"%s: MDP Comp not possible for this frame",
1466 __FUNCTION__);
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001467 ret = -1;
Saurabh Shahb39f8152013-08-22 10:21:44 -07001468 }
Saurabh Shahb39f8152013-08-22 10:21:44 -07001469
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001470 if(isDebug()) {
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001471 ALOGD("GEOMETRY change: %d",
1472 (list->flags & HWC_GEOMETRY_CHANGED));
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001473 android::String8 sDump("");
1474 dump(sDump);
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001475 ALOGD("%s",sDump.string());
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001476 }
1477
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001478 mCachedFrame.cacheAll(list);
1479 mCachedFrame.updateCounts(mCurrentFrame);
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001480 double panelRefRate =
1481 1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;
1482 sBwClaimed += calcMDPBytesRead(ctx, list) * panelRefRate;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001483 return ret;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001484}
1485
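/*
 * Reserves two VG pipes (left/right halves) on the default mixer so a 4k2k
 * YUV layer can be driven by source split instead of decimation. Returns
 * false if either pipe cannot be acquired.
 */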
radhakrishnac9a67412013-09-25 17:40:42 +05301486bool MDPComp::allocSplitVGPipesfor4k2k(hwc_context_t *ctx,
1487 hwc_display_contents_1_t* list, int index) {
1488
1489 bool bRet = true;
1490 hwc_layer_1_t* layer = &list->hwLayers[index];
1491 private_handle_t *hnd = (private_handle_t *)layer->handle;
1492 int mdpIndex = mCurrentFrame.layerToMDP[index];
1493 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
1494 info.pipeInfo = new MdpYUVPipeInfo;
1495 info.rot = NULL;
1496 MdpYUVPipeInfo& pipe_info = *(MdpYUVPipeInfo*)info.pipeInfo;
1497 ePipeType type = MDPCOMP_OV_VG;
1498
1499 pipe_info.lIndex = ovutils::OV_INVALID;
1500 pipe_info.rIndex = ovutils::OV_INVALID;
1501
1502 pipe_info.lIndex = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
1503 if(pipe_info.lIndex == ovutils::OV_INVALID){
1504 bRet = false;
1505 ALOGD_IF(isDebug(),"%s: allocating first VG pipe failed",
1506 __FUNCTION__);
1507 }
1508 pipe_info.rIndex = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
1509 if(pipe_info.rIndex == ovutils::OV_INVALID){
1510 bRet = false;
1511 ALOGD_IF(isDebug(),"%s: allocating second VG pipe failed",
1512 __FUNCTION__);
1513 }
1514 return bRet;
1515}
Saurabh Shah88e4d272013-09-03 13:31:29 -07001516//=============MDPCompNonSplit===================================================
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001517
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001518void MDPCompNonSplit::adjustForSourceSplit(hwc_context_t *ctx,
radhakrishnac9a67412013-09-25 17:40:42 +05301519 hwc_display_contents_1_t* list){
1520    //Since we split a 4kx2k YUV layer and program it to 2 VG pipes
1521    //(if available), increase the MDP count accordingly
1522 mCurrentFrame.mdpCount += ctx->listStats[mDpy].yuv4k2kCount;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001523
1524    //If a 4k2k YUV layer split is possible, and if
1525    //fbZ is above the 4k2k layer, increment the FB z-order by 1,
1526    //since splitting the 4k2k layer consumes an extra z-order
1527    //for its right half
1528 if(mCurrentFrame.fbZ >= 0) {
1529 int n4k2kYuvCount = ctx->listStats[mDpy].yuv4k2kCount;
1530 for(int index = 0; index < n4k2kYuvCount; index++){
1531 int n4k2kYuvIndex =
1532 ctx->listStats[mDpy].yuv4k2kIndices[index];
1533 if(mCurrentFrame.fbZ > n4k2kYuvIndex){
1534 mCurrentFrame.fbZ += 1;
1535 }
1536 }
1537 }
radhakrishnac9a67412013-09-25 17:40:42 +05301538}
1539
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001540/*
1541 * Configures pipe(s) for MDP composition
1542 */
Saurabh Shah88e4d272013-09-03 13:31:29 -07001543int MDPCompNonSplit::configure(hwc_context_t *ctx, hwc_layer_1_t *layer,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001544 PipeLayerPair& PipeLayerPair) {
Saurabh Shah88e4d272013-09-03 13:31:29 -07001545 MdpPipeInfoNonSplit& mdp_info =
1546 *(static_cast<MdpPipeInfoNonSplit*>(PipeLayerPair.pipeInfo));
Saurabh Shahacf10202013-02-26 10:15:15 -08001547 eMdpFlags mdpFlags = OV_MDP_BACKEND_COMPOSITION;
1548 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1549 eIsFg isFg = IS_FG_OFF;
1550 eDest dest = mdp_info.index;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001551
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001552 ALOGD_IF(isDebug(),"%s: configuring: layer: %p z_order: %d dest_pipe: %d",
1553 __FUNCTION__, layer, zOrder, dest);
1554
Saurabh Shah88e4d272013-09-03 13:31:29 -07001555 return configureNonSplit(ctx, layer, mDpy, mdpFlags, zOrder, isFg, dest,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001556 &PipeLayerPair.rot);
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001557}
1558
Saurabh Shah88e4d272013-09-03 13:31:29 -07001559bool MDPCompNonSplit::arePipesAvailable(hwc_context_t *ctx,
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001560 hwc_display_contents_1_t* list) {
1561 overlay::Overlay& ov = *ctx->mOverlay;
1562 int numPipesNeeded = mCurrentFrame.mdpCount;
1563 int availPipes = ov.availablePipes(mDpy, Overlay::MIXER_DEFAULT);
1564
1565 //Reserve pipe for FB
1566 if(mCurrentFrame.fbCount)
1567 availPipes -= 1;
1568
1569 if(numPipesNeeded > availPipes) {
1570 ALOGD_IF(isDebug(), "%s: Insufficient pipes, dpy %d needed %d, avail %d",
1571 __FUNCTION__, mDpy, numPipesNeeded, availPipes);
1572 return false;
1573 }
1574
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001575 if(not areVGPipesAvailable(ctx, list)) {
1576 return false;
1577 }
1578
1579 return true;
1580}
1581
1582bool MDPCompNonSplit::areVGPipesAvailable(hwc_context_t *ctx,
1583 hwc_display_contents_1_t* list) {
1584 overlay::Overlay& ov = *ctx->mOverlay;
1585 int pipesNeeded = 0;
1586 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1587 if(!mCurrentFrame.isFBComposed[i]) {
1588 hwc_layer_1_t* layer = &list->hwLayers[i];
1589 hwc_rect_t dst = layer->displayFrame;
1590 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301591 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1592 pipesNeeded = pipesNeeded + 2;
1593 }
1594 else if(isYuvBuffer(hnd)) {
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001595 pipesNeeded++;
1596 }
1597 }
1598 }
1599
1600 int availableVGPipes = ov.availablePipes(mDpy, ovutils::OV_MDP_PIPE_VG);
1601 if(pipesNeeded > availableVGPipes) {
1602        ALOGD_IF(isDebug(), "%s: Insufficient VG pipes for video layers "
1603 "dpy %d needed %d, avail %d",
1604 __FUNCTION__, mDpy, pipesNeeded, availableVGPipes);
1605 return false;
1606 }
1607
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001608 return true;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001609}
1610
Saurabh Shah88e4d272013-09-03 13:31:29 -07001611bool MDPCompNonSplit::allocLayerPipes(hwc_context_t *ctx,
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001612 hwc_display_contents_1_t* list) {
1613 for(int index = 0; index < mCurrentFrame.layerCount; index++) {
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001614
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001615 if(mCurrentFrame.isFBComposed[index]) continue;
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001616
Jeykumar Sankarancf537002013-01-21 21:19:15 -08001617 hwc_layer_1_t* layer = &list->hwLayers[index];
1618 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301619 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1620 if(allocSplitVGPipesfor4k2k(ctx, list, index)){
1621 continue;
1622 }
1623 }
1624
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001625 int mdpIndex = mCurrentFrame.layerToMDP[index];
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001626 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
Saurabh Shah88e4d272013-09-03 13:31:29 -07001627 info.pipeInfo = new MdpPipeInfoNonSplit;
Saurabh Shahacf10202013-02-26 10:15:15 -08001628 info.rot = NULL;
Saurabh Shah88e4d272013-09-03 13:31:29 -07001629 MdpPipeInfoNonSplit& pipe_info = *(MdpPipeInfoNonSplit*)info.pipeInfo;
Jeykumar Sankarana37fdbf2013-03-06 18:59:28 -08001630 ePipeType type = MDPCOMP_OV_ANY;
1631
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001632 if(isYuvBuffer(hnd)) {
1633 type = MDPCOMP_OV_VG;
Prabhanjan Kandula47191dc2014-01-22 23:01:45 +05301634 } else if(qdutils::MDPVersion::getInstance().is8x26() &&
1635 (ctx->dpyAttr[HWC_DISPLAY_PRIMARY].xres > 1024)) {
1636 if(qhwc::needsScaling(layer))
1637 type = MDPCOMP_OV_RGB;
Prabhanjan Kandula21918db2013-11-26 15:51:58 +05301638 } else if(!qhwc::needsScaling(layer)
Saurabh Shah85234ec2013-04-12 17:09:00 -07001639 && Overlay::getDMAMode() != Overlay::DMA_BLOCK_MODE
1640 && ctx->mMDP.version >= qdutils::MDSS_V5) {
Jeykumar Sankarana37fdbf2013-03-06 18:59:28 -08001641 type = MDPCOMP_OV_DMA;
1642 }
1643
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001644 pipe_info.index = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001645 if(pipe_info.index == ovutils::OV_INVALID) {
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001646 ALOGD_IF(isDebug(), "%s: Unable to get pipe type = %d",
1647 __FUNCTION__, (int) type);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001648 return false;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001649 }
1650 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001651 return true;
1652}
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001653
radhakrishnac9a67412013-09-25 17:40:42 +05301654int MDPCompNonSplit::configure4k2kYuv(hwc_context_t *ctx, hwc_layer_1_t *layer,
1655 PipeLayerPair& PipeLayerPair) {
1656 MdpYUVPipeInfo& mdp_info =
1657 *(static_cast<MdpYUVPipeInfo*>(PipeLayerPair.pipeInfo));
1658 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1659 eIsFg isFg = IS_FG_OFF;
1660 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1661 eDest lDest = mdp_info.lIndex;
1662 eDest rDest = mdp_info.rIndex;
1663
1664 return configureSourceSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg,
1665 lDest, rDest, &PipeLayerPair.rot);
1666}
1667
Saurabh Shah88e4d272013-09-03 13:31:29 -07001668bool MDPCompNonSplit::draw(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001669
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001670 if(!isEnabled()) {
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001671 ALOGD_IF(isDebug(),"%s: MDP Comp not configured", __FUNCTION__);
1672 return true;
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -08001673 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001674
1675 if(!ctx || !list) {
1676        ALOGE("%s: invalid context or list",__FUNCTION__);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001677 return false;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001678 }
1679
Prabhanjan Kandula08222fc2013-07-10 17:20:59 +05301680 if(ctx->listStats[mDpy].numAppLayers > MAX_NUM_APP_LAYERS) {
1681 ALOGD_IF(isDebug(),"%s: Exceeding max layer count", __FUNCTION__);
1682 return true;
1683 }
1684
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001685 /* reset Invalidator */
Saurabh Shah2d998a92013-05-14 17:55:58 -07001686 if(idleInvalidator && !sIdleFallBack && mCurrentFrame.mdpCount)
Saurabh Shahb2117fe2014-01-23 18:39:01 -08001687 idleInvalidator->handleUpdateEvent();
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001688
1689 overlay::Overlay& ov = *ctx->mOverlay;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001690 LayerProp *layerProp = ctx->layerProp[mDpy];
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001691
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001692 int numHwLayers = ctx->listStats[mDpy].numAppLayers;
1693 for(int i = 0; i < numHwLayers && mCurrentFrame.mdpCount; i++ )
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001694 {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001695 if(mCurrentFrame.isFBComposed[i]) continue;
1696
Naseer Ahmed5b6708a2012-08-02 13:46:08 -07001697 hwc_layer_1_t *layer = &list->hwLayers[i];
Saurabh Shahacf10202013-02-26 10:15:15 -08001698 private_handle_t *hnd = (private_handle_t *)layer->handle;
1699 if(!hnd) {
Sushil Chauhan897a9c32013-07-18 11:09:55 -07001700 if (!(layer->flags & HWC_COLOR_FILL)) {
1701 ALOGE("%s handle null", __FUNCTION__);
1702 return false;
1703 }
1704 // No PLAY for Color layer
1705 layerProp[i].mFlags &= ~HWC_MDPCOMP;
1706 continue;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001707 }
1708
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001709 int mdpIndex = mCurrentFrame.layerToMDP[i];
1710
radhakrishnac9a67412013-09-25 17:40:42 +05301711 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit)
1712 {
1713 MdpYUVPipeInfo& pipe_info =
1714 *(MdpYUVPipeInfo*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
1715 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
1716 ovutils::eDest indexL = pipe_info.lIndex;
1717 ovutils::eDest indexR = pipe_info.rIndex;
1718 int fd = hnd->fd;
1719 uint32_t offset = hnd->offset;
1720 if(rot) {
1721 rot->queueBuffer(fd, offset);
1722 fd = rot->getDstMemId();
1723 offset = rot->getDstOffset();
1724 }
1725 if(indexL != ovutils::OV_INVALID) {
1726 ovutils::eDest destL = (ovutils::eDest)indexL;
1727 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1728 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
1729 if (!ov.queueBuffer(fd, offset, destL)) {
1730 ALOGE("%s: queueBuffer failed for display:%d",
1731 __FUNCTION__, mDpy);
1732 return false;
1733 }
1734 }
1735
1736 if(indexR != ovutils::OV_INVALID) {
1737 ovutils::eDest destR = (ovutils::eDest)indexR;
1738 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1739 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
1740 if (!ov.queueBuffer(fd, offset, destR)) {
1741 ALOGE("%s: queueBuffer failed for display:%d",
1742 __FUNCTION__, mDpy);
1743 return false;
1744 }
1745 }
1746 }
1747 else{
1748 MdpPipeInfoNonSplit& pipe_info =
Saurabh Shah88e4d272013-09-03 13:31:29 -07001749 *(MdpPipeInfoNonSplit*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
radhakrishnac9a67412013-09-25 17:40:42 +05301750 ovutils::eDest dest = pipe_info.index;
1751 if(dest == ovutils::OV_INVALID) {
1752 ALOGE("%s: Invalid pipe index (%d)", __FUNCTION__, dest);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001753 return false;
radhakrishnac9a67412013-09-25 17:40:42 +05301754 }
Saurabh Shahacf10202013-02-26 10:15:15 -08001755
radhakrishnac9a67412013-09-25 17:40:42 +05301756 if(!(layerProp[i].mFlags & HWC_MDPCOMP)) {
1757 continue;
1758 }
1759
1760 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1761 using pipe: %d", __FUNCTION__, layer,
1762 hnd, dest );
1763
1764 int fd = hnd->fd;
1765 uint32_t offset = hnd->offset;
1766
1767 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
1768 if(rot) {
1769 if(!rot->queueBuffer(fd, offset))
1770 return false;
1771 fd = rot->getDstMemId();
1772 offset = rot->getDstOffset();
1773 }
1774
1775 if (!ov.queueBuffer(fd, offset, dest)) {
1776 ALOGE("%s: queueBuffer failed for display:%d ",
1777 __FUNCTION__, mDpy);
1778 return false;
1779 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001780 }
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001781
1782 layerProp[i].mFlags &= ~HWC_MDPCOMP;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001783 }
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001784 return true;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001785}
1786
Saurabh Shah88e4d272013-09-03 13:31:29 -07001787//=============MDPCompSplit===================================================
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001788
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001789void MDPCompSplit::adjustForSourceSplit(hwc_context_t *ctx,
radhakrishnac9a67412013-09-25 17:40:42 +05301790 hwc_display_contents_1_t* list){
1791    //If a 4kx2k YUV layer lies entirely within either the left half
1792    //or the right half, try splitting the YUV layer to avoid decimation
1793 int n4k2kYuvCount = ctx->listStats[mDpy].yuv4k2kCount;
1794 const int lSplit = getLeftSplit(ctx, mDpy);
1795 for(int index = 0; index < n4k2kYuvCount; index++){
1796 int n4k2kYuvIndex = ctx->listStats[mDpy].yuv4k2kIndices[index];
1797 hwc_layer_1_t* layer = &list->hwLayers[n4k2kYuvIndex];
1798 hwc_rect_t dst = layer->displayFrame;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001799 if((dst.left > lSplit) || (dst.right < lSplit)) {
radhakrishnac9a67412013-09-25 17:40:42 +05301800 mCurrentFrame.mdpCount += 1;
1801 }
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001802 if(mCurrentFrame.fbZ > n4k2kYuvIndex){
1803 mCurrentFrame.fbZ += 1;
1804 }
radhakrishnac9a67412013-09-25 17:40:42 +05301805 }
1806}
1807
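/*
 * Counts the pipes a given mixer needs for this frame: an MDP layer whose
 * destination extends left of the split line costs a pipe on the left mixer,
 * and one extending right of it costs a pipe on the right mixer (a spanning
 * layer costs one on each).
 */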
Saurabh Shah88e4d272013-09-03 13:31:29 -07001808int MDPCompSplit::pipesNeeded(hwc_context_t *ctx,
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001809 hwc_display_contents_1_t* list,
1810 int mixer) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001811 int pipesNeeded = 0;
Saurabh Shah67a38c32013-06-10 16:23:15 -07001812 const int xres = ctx->dpyAttr[mDpy].xres;
Saurabh Shah07a8ca82013-08-06 18:45:42 -07001813
1814 const int lSplit = getLeftSplit(ctx, mDpy);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001815
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001816 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1817 if(!mCurrentFrame.isFBComposed[i]) {
1818 hwc_layer_1_t* layer = &list->hwLayers[i];
1819 hwc_rect_t dst = layer->displayFrame;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001820 if(mixer == Overlay::MIXER_LEFT && dst.left < lSplit) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001821 pipesNeeded++;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001822 } else if(mixer == Overlay::MIXER_RIGHT && dst.right > lSplit) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001823 pipesNeeded++;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001824 }
1825 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001826 }
1827 return pipesNeeded;
1828}
1829
Saurabh Shah88e4d272013-09-03 13:31:29 -07001830bool MDPCompSplit::arePipesAvailable(hwc_context_t *ctx,
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001831 hwc_display_contents_1_t* list) {
1832 overlay::Overlay& ov = *ctx->mOverlay;
Saurabh Shah082468e2013-09-12 10:05:32 -07001833 int totalPipesNeeded = 0;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001834
1835 for(int i = 0; i < Overlay::MIXER_MAX; i++) {
1836 int numPipesNeeded = pipesNeeded(ctx, list, i);
1837 int availPipes = ov.availablePipes(mDpy, i);
1838
1839 //Reserve pipe(s)for FB
1840 if(mCurrentFrame.fbCount)
Saurabh Shah082468e2013-09-12 10:05:32 -07001841 numPipesNeeded += 1;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001842
Saurabh Shah082468e2013-09-12 10:05:32 -07001843 totalPipesNeeded += numPipesNeeded;
1844
1845 //Per mixer check.
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001846 if(numPipesNeeded > availPipes) {
1847 ALOGD_IF(isDebug(), "%s: Insufficient pipes for "
1848 "dpy %d mixer %d needed %d, avail %d",
1849 __FUNCTION__, mDpy, i, numPipesNeeded, availPipes);
1850 return false;
1851 }
1852 }
Saurabh Shah082468e2013-09-12 10:05:32 -07001853
1854 //Per display check, since unused pipes can get counted twice.
1855 int totalPipesAvailable = ov.availablePipes(mDpy);
1856 if(totalPipesNeeded > totalPipesAvailable) {
1857 ALOGD_IF(isDebug(), "%s: Insufficient pipes for "
1858 "dpy %d needed %d, avail %d",
1859 __FUNCTION__, mDpy, totalPipesNeeded, totalPipesAvailable);
1860 return false;
1861 }
1862
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001863 if(not areVGPipesAvailable(ctx, list)) {
1864 return false;
1865 }
1866
1867 return true;
1868}
1869
1870bool MDPCompSplit::areVGPipesAvailable(hwc_context_t *ctx,
1871 hwc_display_contents_1_t* list) {
1872 overlay::Overlay& ov = *ctx->mOverlay;
1873 int pipesNeeded = 0;
1874 const int lSplit = getLeftSplit(ctx, mDpy);
1875 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1876 if(!mCurrentFrame.isFBComposed[i]) {
1877 hwc_layer_1_t* layer = &list->hwLayers[i];
1878 hwc_rect_t dst = layer->displayFrame;
1879 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301880 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1881 if((dst.left > lSplit)||(dst.right < lSplit)){
1882 pipesNeeded = pipesNeeded + 2;
1883 continue;
1884 }
1885 }
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001886 if(isYuvBuffer(hnd)) {
1887 if(dst.left < lSplit) {
1888 pipesNeeded++;
1889 }
1890 if(dst.right > lSplit) {
1891 pipesNeeded++;
1892 }
1893 }
1894 }
1895 }
1896
1897 int availableVGPipes = ov.availablePipes(mDpy, ovutils::OV_MDP_PIPE_VG);
1898 if(pipesNeeded > availableVGPipes) {
1899        ALOGD_IF(isDebug(), "%s: Insufficient VG pipes for video layers "
1900 "dpy %d needed %d, avail %d",
1901 __FUNCTION__, mDpy, pipesNeeded, availableVGPipes);
1902 return false;
1903 }
1904
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001905 return true;
1906}
1907
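/*
 * Acquires one pipe per mixer for a layer on a split display: a left-mixer
 * pipe if the destination starts left of the split point and a right-mixer
 * pipe if it extends past it. Returns false if a needed pipe cannot be
 * obtained.
 */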
Saurabh Shah88e4d272013-09-03 13:31:29 -07001908bool MDPCompSplit::acquireMDPPipes(hwc_context_t *ctx, hwc_layer_1_t* layer,
1909 MdpPipeInfoSplit& pipe_info,
Saurabh Shah67a38c32013-06-10 16:23:15 -07001910 ePipeType type) {
1911 const int xres = ctx->dpyAttr[mDpy].xres;
Saurabh Shah07a8ca82013-08-06 18:45:42 -07001912 const int lSplit = getLeftSplit(ctx, mDpy);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001913
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001914 hwc_rect_t dst = layer->displayFrame;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001915 pipe_info.lIndex = ovutils::OV_INVALID;
1916 pipe_info.rIndex = ovutils::OV_INVALID;
1917
1918 if (dst.left < lSplit) {
1919 pipe_info.lIndex = getMdpPipe(ctx, type, Overlay::MIXER_LEFT);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001920 if(pipe_info.lIndex == ovutils::OV_INVALID)
1921 return false;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001922 }
1923
1924 if(dst.right > lSplit) {
1925 pipe_info.rIndex = getMdpPipe(ctx, type, Overlay::MIXER_RIGHT);
1926 if(pipe_info.rIndex == ovutils::OV_INVALID)
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001927 return false;
1928 }
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001929
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001930 return true;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001931}
1932
Saurabh Shah88e4d272013-09-03 13:31:29 -07001933bool MDPCompSplit::allocLayerPipes(hwc_context_t *ctx,
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001934 hwc_display_contents_1_t* list) {
1935 for(int index = 0 ; index < mCurrentFrame.layerCount; index++) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001936
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001937 if(mCurrentFrame.isFBComposed[index]) continue;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001938
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001939 hwc_layer_1_t* layer = &list->hwLayers[index];
1940 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301941 hwc_rect_t dst = layer->displayFrame;
1942 const int lSplit = getLeftSplit(ctx, mDpy);
1943 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1944 if((dst.left > lSplit)||(dst.right < lSplit)){
1945 if(allocSplitVGPipesfor4k2k(ctx, list, index)){
1946 continue;
1947 }
1948 }
1949 }
Saurabh Shah0d65dbe2013-06-06 18:33:16 -07001950 int mdpIndex = mCurrentFrame.layerToMDP[index];
1951 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
Saurabh Shah88e4d272013-09-03 13:31:29 -07001952 info.pipeInfo = new MdpPipeInfoSplit;
Saurabh Shah9e3adb22013-03-26 11:16:27 -07001953 info.rot = NULL;
Saurabh Shah88e4d272013-09-03 13:31:29 -07001954 MdpPipeInfoSplit& pipe_info = *(MdpPipeInfoSplit*)info.pipeInfo;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001955 ePipeType type = MDPCOMP_OV_ANY;
1956
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001957 if(isYuvBuffer(hnd)) {
1958 type = MDPCOMP_OV_VG;
Sushil Chauhan15a2ea62013-09-04 18:28:36 -07001959 } else if(!qhwc::needsScalingWithSplit(ctx, layer, mDpy)
Saurabh Shah85234ec2013-04-12 17:09:00 -07001960 && Overlay::getDMAMode() != Overlay::DMA_BLOCK_MODE
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001961 && ctx->mMDP.version >= qdutils::MDSS_V5) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001962 type = MDPCOMP_OV_DMA;
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001963 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001964
1965 if(!acquireMDPPipes(ctx, layer, pipe_info, type)) {
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001966 ALOGD_IF(isDebug(), "%s: Unable to get pipe for type = %d",
1967 __FUNCTION__, (int) type);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001968 return false;
1969 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001970 }
1971 return true;
1972}
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001973
radhakrishnac9a67412013-09-25 17:40:42 +05301974int MDPCompSplit::configure4k2kYuv(hwc_context_t *ctx, hwc_layer_1_t *layer,
1975 PipeLayerPair& PipeLayerPair) {
1976 const int lSplit = getLeftSplit(ctx, mDpy);
1977 hwc_rect_t dst = layer->displayFrame;
1978 if((dst.left > lSplit)||(dst.right < lSplit)){
1979 MdpYUVPipeInfo& mdp_info =
1980 *(static_cast<MdpYUVPipeInfo*>(PipeLayerPair.pipeInfo));
1981 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1982 eIsFg isFg = IS_FG_OFF;
1983 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1984 eDest lDest = mdp_info.lIndex;
1985 eDest rDest = mdp_info.rIndex;
1986
1987 return configureSourceSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg,
1988 lDest, rDest, &PipeLayerPair.rot);
1989 }
1990 else{
1991 return configure(ctx, layer, PipeLayerPair);
1992 }
1993}
1994
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001995/*
1996 * Configures pipe(s) for MDP composition
1997 */
Saurabh Shah88e4d272013-09-03 13:31:29 -07001998int MDPCompSplit::configure(hwc_context_t *ctx, hwc_layer_1_t *layer,
Saurabh Shah67a38c32013-06-10 16:23:15 -07001999 PipeLayerPair& PipeLayerPair) {
Saurabh Shah88e4d272013-09-03 13:31:29 -07002000 MdpPipeInfoSplit& mdp_info =
2001 *(static_cast<MdpPipeInfoSplit*>(PipeLayerPair.pipeInfo));
Saurabh Shahacf10202013-02-26 10:15:15 -08002002 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
2003 eIsFg isFg = IS_FG_OFF;
2004 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
2005 eDest lDest = mdp_info.lIndex;
2006 eDest rDest = mdp_info.rIndex;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002007
2008 ALOGD_IF(isDebug(),"%s: configuring: layer: %p z_order: %d dest_pipeL: %d"
2009 "dest_pipeR: %d",__FUNCTION__, layer, zOrder, lDest, rDest);
2010
Saurabh Shah88e4d272013-09-03 13:31:29 -07002011 return configureSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg, lDest,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002012 rDest, &PipeLayerPair.rot);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002013}
2014
Saurabh Shah88e4d272013-09-03 13:31:29 -07002015bool MDPCompSplit::draw(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002016
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002017 if(!isEnabled()) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002018 ALOGD_IF(isDebug(),"%s: MDP Comp not configured", __FUNCTION__);
2019 return true;
2020 }
2021
2022 if(!ctx || !list) {
2023        ALOGE("%s: invalid context or list",__FUNCTION__);
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002024 return false;
2025 }
2026
Prabhanjan Kandula08222fc2013-07-10 17:20:59 +05302027 if(ctx->listStats[mDpy].numAppLayers > MAX_NUM_APP_LAYERS) {
2028 ALOGD_IF(isDebug(),"%s: Exceeding max layer count", __FUNCTION__);
2029 return true;
2030 }
2031
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002032 /* reset Invalidator */
Saurabh Shah2d998a92013-05-14 17:55:58 -07002033 if(idleInvalidator && !sIdleFallBack && mCurrentFrame.mdpCount)
Saurabh Shahb2117fe2014-01-23 18:39:01 -08002034 idleInvalidator->handleUpdateEvent();
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002035
Naseer Ahmed54821fe2012-11-28 18:44:38 -05002036 overlay::Overlay& ov = *ctx->mOverlay;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002037 LayerProp *layerProp = ctx->layerProp[mDpy];
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002038
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002039 int numHwLayers = ctx->listStats[mDpy].numAppLayers;
2040 for(int i = 0; i < numHwLayers && mCurrentFrame.mdpCount; i++ )
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002041 {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002042 if(mCurrentFrame.isFBComposed[i]) continue;
2043
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002044 hwc_layer_1_t *layer = &list->hwLayers[i];
Saurabh Shahacf10202013-02-26 10:15:15 -08002045 private_handle_t *hnd = (private_handle_t *)layer->handle;
2046 if(!hnd) {
2047 ALOGE("%s handle null", __FUNCTION__);
2048 return false;
2049 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002050
2051 if(!(layerProp[i].mFlags & HWC_MDPCOMP)) {
2052 continue;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002053 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002054
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002055 int mdpIndex = mCurrentFrame.layerToMDP[i];
2056
radhakrishnac9a67412013-09-25 17:40:42 +05302057 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit)
2058 {
2059 MdpYUVPipeInfo& pipe_info =
2060 *(MdpYUVPipeInfo*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
2061 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
2062 ovutils::eDest indexL = pipe_info.lIndex;
2063 ovutils::eDest indexR = pipe_info.rIndex;
2064 int fd = hnd->fd;
2065 uint32_t offset = hnd->offset;
2066 if(rot) {
2067 rot->queueBuffer(fd, offset);
2068 fd = rot->getDstMemId();
2069 offset = rot->getDstOffset();
2070 }
2071 if(indexL != ovutils::OV_INVALID) {
2072 ovutils::eDest destL = (ovutils::eDest)indexL;
2073 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
2074 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
2075 if (!ov.queueBuffer(fd, offset, destL)) {
2076 ALOGE("%s: queueBuffer failed for display:%d",
2077 __FUNCTION__, mDpy);
2078 return false;
2079 }
2080 }
Saurabh Shahacf10202013-02-26 10:15:15 -08002081
radhakrishnac9a67412013-09-25 17:40:42 +05302082 if(indexR != ovutils::OV_INVALID) {
2083 ovutils::eDest destR = (ovutils::eDest)indexR;
2084 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
2085 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
2086 if (!ov.queueBuffer(fd, offset, destR)) {
2087 ALOGE("%s: queueBuffer failed for display:%d",
2088 __FUNCTION__, mDpy);
2089 return false;
2090 }
Saurabh Shaha9da08f2013-07-03 13:27:53 -07002091 }
2092 }
radhakrishnac9a67412013-09-25 17:40:42 +05302093 else{
2094 MdpPipeInfoSplit& pipe_info =
2095 *(MdpPipeInfoSplit*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
2096 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
Saurabh Shaha9da08f2013-07-03 13:27:53 -07002097
radhakrishnac9a67412013-09-25 17:40:42 +05302098 ovutils::eDest indexL = pipe_info.lIndex;
2099 ovutils::eDest indexR = pipe_info.rIndex;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002100
radhakrishnac9a67412013-09-25 17:40:42 +05302101 int fd = hnd->fd;
2102 int offset = hnd->offset;
2103
2104 if(ctx->mAD->isModeOn()) {
2105 if(ctx->mAD->draw(ctx, fd, offset)) {
2106 fd = ctx->mAD->getDstFd(ctx);
2107 offset = ctx->mAD->getDstOffset(ctx);
2108 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002109 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002110
radhakrishnac9a67412013-09-25 17:40:42 +05302111 if(rot) {
2112 rot->queueBuffer(fd, offset);
2113 fd = rot->getDstMemId();
2114 offset = rot->getDstOffset();
2115 }
2116
2117 //************* play left mixer **********
2118 if(indexL != ovutils::OV_INVALID) {
2119 ovutils::eDest destL = (ovutils::eDest)indexL;
2120 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
2121 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
2122 if (!ov.queueBuffer(fd, offset, destL)) {
2123 ALOGE("%s: queueBuffer failed for left mixer",
2124 __FUNCTION__);
2125 return false;
2126 }
2127 }
2128
2129 //************* play right mixer **********
2130 if(indexR != ovutils::OV_INVALID) {
2131 ovutils::eDest destR = (ovutils::eDest)indexR;
2132 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
2133 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
2134 if (!ov.queueBuffer(fd, offset, destR)) {
2135 ALOGE("%s: queueBuffer failed for right mixer",
2136 __FUNCTION__);
2137 return false;
2138 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002139 }
2140 }
Saurabh Shahacf10202013-02-26 10:15:15 -08002141
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002142 layerProp[i].mFlags &= ~HWC_MDPCOMP;
2143 }
Saurabh Shahacf10202013-02-26 10:15:15 -08002144
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002145 return true;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002146}
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002147}; //namespace
2148