/*
 * Copyright (C) 2012-2013, The Linux Foundation. All rights reserved.
 * Not a Contribution, Apache license notifications and license are retained
 * for attribution purposes only.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <math.h>
#include "hwc_mdpcomp.h"
#include <sys/ioctl.h>
#include "external.h"
#include "virtual.h"
#include "qdMetaData.h"
#include "mdp_version.h"
#include "hwc_fbupdate.h"
#include "hwc_ad.h"
#include <overlayRotator.h>

using namespace overlay;
using namespace qdutils;
using namespace overlay::utils;
namespace ovutils = overlay::utils;

namespace qhwc {

//==============MDPComp========================================================

IdleInvalidator *MDPComp::idleInvalidator = NULL;
bool MDPComp::sIdleFallBack = false;
bool MDPComp::sDebugLogs = false;
bool MDPComp::sEnabled = false;
bool MDPComp::sEnableMixedMode = true;
bool MDPComp::sEnablePartialFrameUpdate = false;
int MDPComp::sMaxPipesPerMixer = MAX_PIPES_PER_MIXER;
double MDPComp::sMaxBw = 0.0;
double MDPComp::sBwClaimed = 0.0;
bool MDPComp::sEnable4k2kYUVSplit = false;

MDPComp* MDPComp::getObject(hwc_context_t *ctx, const int& dpy) {
    if(isDisplaySplit(ctx, dpy)) {
        return new MDPCompSplit(dpy);
    }
    return new MDPCompNonSplit(dpy);
}

MDPComp::MDPComp(int dpy):mDpy(dpy){};

void MDPComp::dump(android::String8& buf)
{
    if(mCurrentFrame.layerCount > MAX_NUM_APP_LAYERS)
        return;

    dumpsys_log(buf,"HWC Map for Dpy: %s \n",
                (mDpy == 0) ? "\"PRIMARY\"" :
                (mDpy == 1) ? "\"EXTERNAL\"" : "\"VIRTUAL\"");
    dumpsys_log(buf,"CURR_FRAME: layerCount:%2d mdpCount:%2d "
                "fbCount:%2d \n", mCurrentFrame.layerCount,
                mCurrentFrame.mdpCount, mCurrentFrame.fbCount);
    dumpsys_log(buf,"needsFBRedraw:%3s pipesUsed:%2d MaxPipesPerMixer: %d \n",
                (mCurrentFrame.needsRedraw? "YES" : "NO"),
                mCurrentFrame.mdpCount, sMaxPipesPerMixer);
    dumpsys_log(buf," --------------------------------------------- \n");
    dumpsys_log(buf," listIdx | cached? | mdpIndex | comptype | Z \n");
    dumpsys_log(buf," --------------------------------------------- \n");
    for(int index = 0; index < mCurrentFrame.layerCount; index++ )
        dumpsys_log(buf," %7d | %7s | %8d | %9s | %2d \n",
                    index,
                    (mCurrentFrame.isFBComposed[index] ? "YES" : "NO"),
                    mCurrentFrame.layerToMDP[index],
                    (mCurrentFrame.isFBComposed[index] ?
                    (mCurrentFrame.drop[index] ? "DROP" :
                    (mCurrentFrame.needsRedraw ? "GLES" : "CACHE")) : "MDP"),
                    (mCurrentFrame.isFBComposed[index] ? mCurrentFrame.fbZ :
    mCurrentFrame.mdpToLayer[mCurrentFrame.layerToMDP[index]].pipeInfo->zOrder));
    dumpsys_log(buf,"\n");
}

bool MDPComp::init(hwc_context_t *ctx) {

    if(!ctx) {
        ALOGE("%s: Invalid hwc context!!",__FUNCTION__);
        return false;
    }

    char property[PROPERTY_VALUE_MAX];

    sEnabled = false;
    if((property_get("persist.hwc.mdpcomp.enable", property, NULL) > 0) &&
       (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
        (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
        sEnabled = true;
    }

    sEnableMixedMode = true;
    if((property_get("debug.mdpcomp.mixedmode.disable", property, NULL) > 0) &&
       (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
        (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
        sEnableMixedMode = false;
    }

    if(property_get("debug.mdpcomp.logs", property, NULL) > 0) {
        if(atoi(property) != 0)
            sDebugLogs = true;
    }

    if(property_get("persist.hwc.partialupdate.enable", property, NULL) > 0) {
        if((atoi(property) != 0) && ctx->mMDP.panel == MIPI_CMD_PANEL &&
           qdutils::MDPVersion::getInstance().is8x74v2())
            sEnablePartialFrameUpdate = true;
    }
    ALOGE_IF(isDebug(), "%s: Partial Update applicable?: %d",__FUNCTION__,
             sEnablePartialFrameUpdate);

    sMaxPipesPerMixer = MAX_PIPES_PER_MIXER;
    if(property_get("debug.mdpcomp.maxpermixer", property, "-1") > 0) {
        int val = atoi(property);
        if(val >= 0)
            sMaxPipesPerMixer = min(val, MAX_PIPES_PER_MIXER);
    }

    if(ctx->mMDP.panel != MIPI_CMD_PANEL) {
        // Idle invalidation is not necessary on command mode panels
        long idle_timeout = DEFAULT_IDLE_TIME;
        if(property_get("debug.mdpcomp.idletime", property, NULL) > 0) {
            if(atoi(property) != 0)
                idle_timeout = atoi(property);
        }

        //create Idle Invalidator only when not disabled through property
        if(idle_timeout != -1)
            idleInvalidator = IdleInvalidator::getInstance();

        if(idleInvalidator == NULL) {
            ALOGE("%s: failed to instantiate idleInvalidator object",
                  __FUNCTION__);
        } else {
            idleInvalidator->init(timeout_handler, ctx, idle_timeout);
        }
    }

    if((property_get("debug.mdpcomp.4k2kSplit", property, "0") > 0) &&
       (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
        (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
        sEnable4k2kYUVSplit = true;
    }
    return true;
}

void MDPComp::reset(hwc_context_t *ctx) {
    const int numLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numLayers);
    ctx->mOverlay->clear(mDpy);
    ctx->mLayerRotMap[mDpy]->clear();
}

void MDPComp::timeout_handler(void *udata) {
    struct hwc_context_t* ctx = (struct hwc_context_t*)(udata);

    if(!ctx) {
        ALOGE("%s: received empty data in timer callback", __FUNCTION__);
        return;
    }

    if(!ctx->proc) {
        ALOGE("%s: HWC proc not registered", __FUNCTION__);
        return;
    }
    sIdleFallBack = true;
    /* Trigger SF to redraw the current frame */
    ctx->proc->invalidate(ctx->proc);
}

void MDPComp::setMDPCompLayerFlags(hwc_context_t *ctx,
                                   hwc_display_contents_1_t* list) {
    LayerProp *layerProp = ctx->layerProp[mDpy];

    for(int index = 0; index < ctx->listStats[mDpy].numAppLayers; index++) {
        hwc_layer_1_t* layer = &(list->hwLayers[index]);
        if(!mCurrentFrame.isFBComposed[index]) {
            layerProp[index].mFlags |= HWC_MDPCOMP;
            layer->compositionType = HWC_OVERLAY;
            layer->hints |= HWC_HINT_CLEAR_FB;
        } else {
            /* Drop the layer when it is already present in FB OR when it lies
             * outside the frame's ROI */
            if(!mCurrentFrame.needsRedraw || mCurrentFrame.drop[index]) {
                layer->compositionType = HWC_OVERLAY;
            }
        }
    }
}

void MDPComp::setRedraw(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    mCurrentFrame.needsRedraw = false;
    if(!mCachedFrame.isSameFrame(mCurrentFrame, list) ||
            (list->flags & HWC_GEOMETRY_CHANGED) ||
            isSkipPresent(ctx, mDpy)) {
        mCurrentFrame.needsRedraw = true;
    }
}

MDPComp::FrameInfo::FrameInfo() {
    reset(0);
}

void MDPComp::FrameInfo::reset(const int& numLayers) {
    for(int i = 0 ; i < MAX_PIPES_PER_MIXER && numLayers; i++ ) {
        if(mdpToLayer[i].pipeInfo) {
            delete mdpToLayer[i].pipeInfo;
            mdpToLayer[i].pipeInfo = NULL;
            //We dont own the rotator
            mdpToLayer[i].rot = NULL;
        }
    }

    memset(&mdpToLayer, 0, sizeof(mdpToLayer));
    memset(&layerToMDP, -1, sizeof(layerToMDP));
    memset(&isFBComposed, 1, sizeof(isFBComposed));

    layerCount = numLayers;
    fbCount = numLayers;
    mdpCount = 0;
    needsRedraw = true;
    fbZ = 0;
}

void MDPComp::FrameInfo::map() {
    // populate layer and MDP maps
    int mdpIdx = 0;
    for(int idx = 0; idx < layerCount; idx++) {
        if(!isFBComposed[idx]) {
            mdpToLayer[mdpIdx].listIndex = idx;
            layerToMDP[idx] = mdpIdx++;
        }
    }
}

MDPComp::LayerCache::LayerCache() {
    reset();
}

void MDPComp::LayerCache::reset() {
    memset(&hnd, 0, sizeof(hnd));
    memset(&isFBComposed, true, sizeof(isFBComposed));
    memset(&drop, false, sizeof(drop));
    layerCount = 0;
}

void MDPComp::LayerCache::cacheAll(hwc_display_contents_1_t* list) {
    const int numAppLayers = list->numHwLayers - 1;
    for(int i = 0; i < numAppLayers; i++) {
        hnd[i] = list->hwLayers[i].handle;
    }
}

void MDPComp::LayerCache::updateCounts(const FrameInfo& curFrame) {
    layerCount = curFrame.layerCount;
    memcpy(&isFBComposed, &curFrame.isFBComposed, sizeof(isFBComposed));
    memcpy(&drop, &curFrame.drop, sizeof(drop));
}

bool MDPComp::LayerCache::isSameFrame(const FrameInfo& curFrame,
                                      hwc_display_contents_1_t* list) {
    if(layerCount != curFrame.layerCount)
        return false;
    for(int i = 0; i < curFrame.layerCount; i++) {
        if((curFrame.isFBComposed[i] != isFBComposed[i]) ||
                (curFrame.drop[i] != drop[i])) {
            return false;
        }
        if(curFrame.isFBComposed[i] &&
           (hnd[i] != list->hwLayers[i].handle)){
            return false;
        }
    }
    return true;
}

bool MDPComp::isSupportedForMDPComp(hwc_context_t *ctx, hwc_layer_1_t* layer) {
    private_handle_t *hnd = (private_handle_t *)layer->handle;
    if((not isYuvBuffer(hnd) and has90Transform(layer)) or
        (not isValidDimension(ctx,layer))
        //More conditions here, SKIP, sRGB+Blend etc
        ) {
        return false;
    }
    return true;
}

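/* Checks layer dimensions against MDP limits: rejects NULL handles (except
 * color-fill layers), non-integral source crops on non-secure buffers, crops
 * narrower or shorter than 5 pixels, and downscale ratios beyond what this
 * MDP version supports. */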
bool MDPComp::isValidDimension(hwc_context_t *ctx, hwc_layer_1_t *layer) {
    const int dpy = HWC_DISPLAY_PRIMARY;
    private_handle_t *hnd = (private_handle_t *)layer->handle;

    if(!hnd) {
        if (layer->flags & HWC_COLOR_FILL) {
            // Color layer
            return true;
        }
        ALOGE("%s: layer handle is NULL", __FUNCTION__);
        return false;
    }

    //XXX: Investigate doing this with pixel phase on MDSS
    if(!isSecureBuffer(hnd) && isNonIntegralSourceCrop(layer->sourceCropf))
        return false;

    int hw_w = ctx->dpyAttr[mDpy].xres;
    int hw_h = ctx->dpyAttr[mDpy].yres;

    hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
    hwc_rect_t dst = layer->displayFrame;
    int crop_w = crop.right - crop.left;
    int crop_h = crop.bottom - crop.top;
    int dst_w = dst.right - dst.left;
    int dst_h = dst.bottom - dst.top;
    float w_dscale = ceilf((float)crop_w / (float)dst_w);
    float h_dscale = ceilf((float)crop_h / (float)dst_h);

    /* Workaround for MDP HW limitation in DSI command mode panels where
     * FPS will not go beyond 30 if buffers on RGB pipes are of width or height
     * less than 5 pixels.
     * There is also a HW limitation in MDP: the minimum block size is 2x2.
     * Fall back to GPU if height is less than 2.
     */
    if((crop_w < 5)||(crop_h < 5))
        return false;

    if((w_dscale > 1.0f) || (h_dscale > 1.0f)) {
        const uint32_t downscale =
            qdutils::MDPVersion::getInstance().getMaxMDPDownscale();
        if(ctx->mMDP.version >= qdutils::MDSS_V5) {
            /* Workaround for downscales larger than 4x.
             * Will be removed once decimator block is enabled for MDSS
             */
            if(!qdutils::MDPVersion::getInstance().supportsDecimation()) {
                if(crop_w > MAX_DISPLAY_DIM || w_dscale > downscale ||
                        h_dscale > downscale)
                    return false;
            } else {
                if(w_dscale > 64 || h_dscale > 64)
                    return false;
            }
        } else { //A-family
            if(w_dscale > downscale || h_dscale > downscale)
                return false;
        }
    }

    return true;
}

ovutils::eDest MDPComp::getMdpPipe(hwc_context_t *ctx, ePipeType type,
        int mixer) {
    overlay::Overlay& ov = *ctx->mOverlay;
    ovutils::eDest mdp_pipe = ovutils::OV_INVALID;

    switch(type) {
    case MDPCOMP_OV_DMA:
        mdp_pipe = ov.nextPipe(ovutils::OV_MDP_PIPE_DMA, mDpy, mixer);
        if(mdp_pipe != ovutils::OV_INVALID) {
            return mdp_pipe;
        }
        //Fall through: no DMA pipe available, try an RGB pipe next
    case MDPCOMP_OV_ANY:
    case MDPCOMP_OV_RGB:
        mdp_pipe = ov.nextPipe(ovutils::OV_MDP_PIPE_RGB, mDpy, mixer);
        if(mdp_pipe != ovutils::OV_INVALID) {
            return mdp_pipe;
        }

        if(type == MDPCOMP_OV_RGB) {
            //Requested only for RGB pipe
            break;
        }
        //Fall through: DMA/ANY requests may still be satisfied by a VG pipe
    case MDPCOMP_OV_VG:
        return ov.nextPipe(ovutils::OV_MDP_PIPE_VG, mDpy, mixer);
    default:
        ALOGE("%s: Invalid pipe type",__FUNCTION__);
        return ovutils::OV_INVALID;
    };
    return ovutils::OV_INVALID;
}

bool MDPComp::isFrameDoable(hwc_context_t *ctx) {
    bool ret = true;
    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    if(!isEnabled()) {
        ALOGD_IF(isDebug(),"%s: MDP Comp. not enabled.", __FUNCTION__);
        ret = false;
    } else if(qdutils::MDPVersion::getInstance().is8x26() &&
            ctx->mVideoTransFlag && ctx->mVirtualDisplay->isConnected()) {
        //1 Padding round to shift pipes across mixers
        ALOGD_IF(isDebug(),"%s: MDP Comp. video transition padding round",
                __FUNCTION__);
        ret = false;
    } else if(ctx->dpyAttr[HWC_DISPLAY_EXTERNAL].isConfiguring ||
              ctx->dpyAttr[HWC_DISPLAY_VIRTUAL].isConfiguring) {
        ALOGD_IF( isDebug(),"%s: External Display connection is pending",
                  __FUNCTION__);
        ret = false;
    } else if(ctx->isPaddingRound) {
        ctx->isPaddingRound = false;
        ALOGD_IF(isDebug(), "%s: padding round",__FUNCTION__);
        ret = false;
    }
    return ret;
}

/*
 * 1) Identify layers that are not visible in the updating ROI and drop them
 * from composition.
 * 2) If a scaling layer would need to be cropped against the generated ROI,
 * reset the ROI to the full resolution.
 */
bool MDPComp::validateAndApplyROI(hwc_context_t *ctx,
        hwc_display_contents_1_t* list, hwc_rect_t roi) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    if(!isValidRect(roi))
        return false;

    hwc_rect_t visibleRect = roi;

    for(int i = numAppLayers - 1; i >= 0; i--){

        if(!isValidRect(visibleRect)) {
            mCurrentFrame.drop[i] = true;
            mCurrentFrame.dropCount++;
        }

        const hwc_layer_1_t* layer = &list->hwLayers[i];

        hwc_rect_t dstRect = layer->displayFrame;
        hwc_rect_t srcRect = integerizeSourceCrop(layer->sourceCropf);
        int transform = layer->transform;

        hwc_rect_t res = getIntersection(visibleRect, dstRect);

        int res_w = res.right - res.left;
        int res_h = res.bottom - res.top;
        int dst_w = dstRect.right - dstRect.left;
        int dst_h = dstRect.bottom - dstRect.top;

        if(!isValidRect(res)) {
            mCurrentFrame.drop[i] = true;
            mCurrentFrame.dropCount++;
        } else {
            /* Reset frame ROI when any layer which needs scaling also needs ROI
             * cropping */
            if((res_w != dst_w || res_h != dst_h) &&
                    needsScaling(layer)) {
                ALOGI("%s: Resetting ROI due to scaling", __FUNCTION__);
                memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
                mCurrentFrame.dropCount = 0;
                return false;
            }
        }

        if (layer->blending == HWC_BLENDING_NONE)
            visibleRect = deductRect(visibleRect, res);
    }
    return true;
}

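/* Partial frame update: union the display frames of all updating (or YUV)
 * layers into a single ROI, validate it against the layer list, and fall
 * back to the full panel resolution if validation fails. */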
void MDPComp::generateROI(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    if(!sEnablePartialFrameUpdate) {
        return;
    }

    if(mDpy || isDisplaySplit(ctx, mDpy)){
        ALOGE_IF(isDebug(), "%s: ROI not supported for "
                "(1) external / virtual displays (2) dual DSI displays",
                __FUNCTION__);
        return;
    }

    if(isSkipPresent(ctx, mDpy))
        return;

    if(list->flags & HWC_GEOMETRY_CHANGED)
        return;

    struct hwc_rect roi = (struct hwc_rect){0, 0, 0, 0};
    for(int index = 0; index < numAppLayers; index++ ) {
        if ((mCachedFrame.hnd[index] != list->hwLayers[index].handle) ||
            isYuvBuffer((private_handle_t *)list->hwLayers[index].handle)) {
            hwc_rect_t dstRect = list->hwLayers[index].displayFrame;
            hwc_rect_t srcRect = integerizeSourceCrop(
                    list->hwLayers[index].sourceCropf);
            int transform = list->hwLayers[index].transform;

            /* Intersect against display boundaries */
            roi = getUnion(roi, dstRect);
        }
    }

    if(!validateAndApplyROI(ctx, list, roi)){
        roi = (struct hwc_rect) {0, 0,
                (int)ctx->dpyAttr[mDpy].xres, (int)ctx->dpyAttr[mDpy].yres};
    }

    ctx->listStats[mDpy].roi.x = roi.left;
    ctx->listStats[mDpy].roi.y = roi.top;
    ctx->listStats[mDpy].roi.w = roi.right - roi.left;
    ctx->listStats[mDpy].roi.h = roi.bottom - roi.top;

    ALOGD_IF(isDebug(),"%s: generated ROI: [%d, %d, %d, %d]", __FUNCTION__,
            roi.left, roi.top, roi.right, roi.bottom);
}

/* Checks for conditions where all the layers marked for MDP comp cannot be
 * bypassed. On such conditions we try to bypass at least the YUV layers */
bool MDPComp::tryFullFrame(hwc_context_t *ctx,
                                hwc_display_contents_1_t* list){

    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    if(sIdleFallBack && !ctx->listStats[mDpy].secureUI) {
        ALOGD_IF(isDebug(), "%s: Idle fallback dpy %d",__FUNCTION__, mDpy);
        return false;
    }

    if(isSkipPresent(ctx, mDpy)) {
        ALOGD_IF(isDebug(),"%s: SKIP present: %d",
                __FUNCTION__,
                isSkipPresent(ctx, mDpy));
        return false;
    }

    // check for action safe flag and downscale mode which requires scaling.
    if(ctx->dpyAttr[mDpy].mActionSafePresent
            || ctx->dpyAttr[mDpy].mDownScaleMode) {
        ALOGD_IF(isDebug(), "%s: Scaling needed for this frame",__FUNCTION__);
        return false;
    }

    for(int i = 0; i < numAppLayers; ++i) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        private_handle_t *hnd = (private_handle_t *)layer->handle;

        if(isYuvBuffer(hnd) && has90Transform(layer)) {
            if(!canUseRotator(ctx, mDpy)) {
                ALOGD_IF(isDebug(), "%s: Can't use rotator for dpy %d",
                        __FUNCTION__, mDpy);
                return false;
            }
        }

        //For 8x26 with panel width>1k, if RGB layer needs HFLIP fail mdp comp
        // may not need it if Gfx pre-rotation can handle all flips & rotations
        if(qdutils::MDPVersion::getInstance().is8x26() &&
                (ctx->dpyAttr[mDpy].xres > 1024) &&
                (layer->transform & HWC_TRANSFORM_FLIP_H) &&
                (!isYuvBuffer(hnd)))
            return false;
    }

    if(ctx->mAD->isDoable()) {
        return false;
    }

    //If all above hard conditions are met we can do full or partial MDP comp.
    bool ret = false;
    if(fullMDPComp(ctx, list)) {
        ret = true;
    } else if(partialMDPComp(ctx, list)) {
        ret = true;
    }

    return ret;
}

bool MDPComp::fullMDPComp(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
    //Will benefit presentation / secondary-only layer.
    if((mDpy > HWC_DISPLAY_PRIMARY) &&
            (list->numHwLayers - 1) > MAX_SEC_LAYERS) {
        ALOGD_IF(isDebug(), "%s: Exceeds max secondary pipes",__FUNCTION__);
        return false;
    }

    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    for(int i = 0; i < numAppLayers; i++) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        if(not isSupportedForMDPComp(ctx, layer)) {
            ALOGD_IF(isDebug(), "%s: Unsupported layer in list",__FUNCTION__);
            return false;
        }

        //For 8x26, if there is only one layer which needs scale for secondary
        //while no scale for primary display, DMA pipe is occupied by primary.
        //If need to fall back to GLES composition, virtual display lacks DMA
        //pipe and error is reported.
        if(qdutils::MDPVersion::getInstance().is8x26() &&
                mDpy >= HWC_DISPLAY_EXTERNAL &&
                qhwc::needsScaling(layer))
            return false;
    }

    mCurrentFrame.fbCount = 0;
    mCurrentFrame.fbZ = -1;
    memcpy(&mCurrentFrame.isFBComposed, &mCurrentFrame.drop,
            sizeof(mCurrentFrame.isFBComposed));
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount -
            mCurrentFrame.dropCount;

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

bool MDPComp::partialMDPComp(hwc_context_t *ctx, hwc_display_contents_1_t* list)
{
    if(!sEnableMixedMode) {
        //Mixed mode is disabled. No need to even try caching.
        return false;
    }

    bool ret = false;
    if(list->flags & HWC_GEOMETRY_CHANGED) { //Try load based first
        ret = loadBasedCompPreferGPU(ctx, list) or
                loadBasedCompPreferMDP(ctx, list) or
                cacheBasedComp(ctx, list);
    } else {
        ret = cacheBasedComp(ctx, list) or
                loadBasedCompPreferGPU(ctx, list) or
                loadBasedCompPreferMDP(ctx, list);
    }

    return ret;
}

bool MDPComp::cacheBasedComp(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numAppLayers);
    updateLayerCache(ctx, list);

    //If an MDP marked layer is unsupported cannot do partial MDP Comp
    for(int i = 0; i < numAppLayers; i++) {
        if(!mCurrentFrame.isFBComposed[i]) {
            hwc_layer_1_t* layer = &list->hwLayers[i];
            if(not isSupportedForMDPComp(ctx, layer)) {
                ALOGD_IF(isDebug(), "%s: Unsupported layer in list",
                        __FUNCTION__);
                reset(ctx);
                return false;
            }
        }
    }

    updateYUV(ctx, list, false /*secure only*/);
    bool ret = markLayersForCaching(ctx, list); //sets up fbZ also
    if(!ret) {
        ALOGD_IF(isDebug(),"%s: batching failed, dpy %d",__FUNCTION__, mDpy);
        reset(ctx);
        return false;
    }

    int mdpCount = mCurrentFrame.mdpCount;

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    //Will benefit cases where a video has non-updating background.
    if((mDpy > HWC_DISPLAY_PRIMARY) and
            (mdpCount > MAX_SEC_LAYERS)) {
        ALOGD_IF(isDebug(), "%s: Exceeds max secondary pipes",__FUNCTION__);
        reset(ctx);
        return false;
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

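/* Load-based mixed composition: with N MDP stages available on the mixer, at
 * least numAppLayers - (N - 1) layers must go to the FB (GLES) target, so
 * this picks the contiguous run of that size with the smallest total source
 * pixel count for the FB batch and marks everything else for MDP. */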
bool MDPComp::loadBasedCompPreferGPU(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    if(not isLoadBasedCompDoable(ctx, list)) {
        return false;
    }

    int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numAppLayers);

    int stagesForMDP = min(sMaxPipesPerMixer, ctx->mOverlay->availablePipes(
            mDpy, Overlay::MIXER_DEFAULT));
    //If MDP has X possible stages, it can take X layers.
    const int batchSize = numAppLayers - (stagesForMDP - 1); //1 for FB

    if(batchSize <= 0) {
        ALOGD_IF(isDebug(), "%s: Not attempting", __FUNCTION__);
        return false;
    }

    int minBatchStart = -1;
    size_t minBatchPixelCount = SIZE_MAX;

    for(int i = 0; i <= numAppLayers - batchSize; i++) {
        uint32_t batchPixelCount = 0;
        for(int j = i; j < i + batchSize; j++) {
            hwc_layer_1_t* layer = &list->hwLayers[j];
            hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
            batchPixelCount += (crop.right - crop.left) *
                    (crop.bottom - crop.top);
        }

        if(batchPixelCount < minBatchPixelCount) {
            minBatchPixelCount = batchPixelCount;
            minBatchStart = i;
        }
    }

    if(minBatchStart < 0) {
        ALOGD_IF(isDebug(), "%s: No batch found batchSize %d numAppLayers %d",
                __FUNCTION__, batchSize, numAppLayers);
        return false;
    }

    for(int i = 0; i < numAppLayers; i++) {
        if(i < minBatchStart || i >= minBatchStart + batchSize) {
            hwc_layer_1_t* layer = &list->hwLayers[i];
            if(not isSupportedForMDPComp(ctx, layer)) {
                ALOGD_IF(isDebug(), "%s: MDP unsupported layer found at %d",
                        __FUNCTION__, i);
                reset(ctx);
                return false;
            }
            mCurrentFrame.isFBComposed[i] = false;
        }
    }

    mCurrentFrame.fbZ = minBatchStart;
    mCurrentFrame.fbCount = batchSize;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - batchSize;

    ALOGD_IF(isDebug(), "%s: fbZ %d batchSize %d",
                __FUNCTION__, mCurrentFrame.fbZ, batchSize);

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

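/* Load-based mixed composition sized by bandwidth rather than pipe count:
 * estimate how many full-screen RGBA layers the remaining bandwidth can feed
 * per refresh and use that as the MDP budget; the top-most layers beyond the
 * budget form the FB (GLES) batch. Illustrative numbers (not from this file):
 * with 2.0 GBps left, a 1080x1920 panel at 60 fps and 4 Bpp, one full-screen
 * layer costs about 1080*1920*4*60 ~= 0.5 GBps, so fullScreenLayers ~= 4 and
 * a 6-layer list would send its top 3 layers to the FB batch. */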
bool MDPComp::loadBasedCompPreferMDP(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    if(not isLoadBasedCompDoable(ctx, list)) {
        return false;
    }

    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numAppLayers);

    //Full screen is from ib perspective, not actual full screen
    const int bpp = 4;
    double panelRefRate =
            1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;

    double bwLeft = sMaxBw - sBwClaimed;

    const int fullScreenLayers = bwLeft * 1000000000 / (ctx->dpyAttr[mDpy].xres
            * ctx->dpyAttr[mDpy].yres * bpp * panelRefRate);

    const int fbBatchSize = numAppLayers - (fullScreenLayers - 1);
    //If batch size is not at least 2, we aren't really preferring MDP, since
    //only 1 layer going to GPU could actually translate into an entire FB
    //needed to be fetched by MDP, thus needing more b/w rather than less.
    if(fbBatchSize < 2 || fbBatchSize > numAppLayers) {
        ALOGD_IF(isDebug(), "%s: Not attempting", __FUNCTION__);
        return false;
    }

    //Top-most layers constitute FB batch
    const int fbBatchStart = numAppLayers - fbBatchSize;

    //Bottom-most layers constitute MDP batch
    for(int i = 0; i < fbBatchStart; i++) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        if(not isSupportedForMDPComp(ctx, layer)) {
            ALOGD_IF(isDebug(), "%s: MDP unsupported layer found at %d",
                    __FUNCTION__, i);
            reset(ctx);
            return false;
        }
        mCurrentFrame.isFBComposed[i] = false;
    }

    mCurrentFrame.fbZ = fbBatchStart;
    mCurrentFrame.fbCount = fbBatchSize;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - fbBatchSize;

    ALOGD_IF(isDebug(), "%s: FB Z %d, num app layers %d, MDP Batch Size %d",
                __FUNCTION__, mCurrentFrame.fbZ, numAppLayers,
                numAppLayers - fbBatchSize);

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

bool MDPComp::isLoadBasedCompDoable(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    if(mDpy or isSecurePresent(ctx, mDpy) or
            isYuvPresent(ctx, mDpy)) {
        return false;
    }
    return true;
}

bool MDPComp::tryVideoOnly(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    const bool secureOnly = true;
    return videoOnlyComp(ctx, list, not secureOnly) or
            videoOnlyComp(ctx, list, secureOnly);
}

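/* Video-only fallback: route just the YUV layers (optionally only the secure
 * ones) through MDP and leave every other layer to the cached framebuffer. */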
bool MDPComp::videoOnlyComp(hwc_context_t *ctx,
        hwc_display_contents_1_t* list, bool secureOnly) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    mCurrentFrame.reset(numAppLayers);
    updateYUV(ctx, list, secureOnly);
    int mdpCount = mCurrentFrame.mdpCount;

    if(!isYuvPresent(ctx, mDpy) or (mdpCount == 0)) {
        reset(ctx);
        return false;
    }

    /* Bail out if we are processing only secured video layers
     * and we dont have any */
    if(!isSecurePresent(ctx, mDpy) && secureOnly){
        reset(ctx);
        return false;
    }

    if(mCurrentFrame.fbCount)
        mCurrentFrame.fbZ = mCurrentFrame.mdpCount;

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

/* Checks for conditions where YUV layers cannot be bypassed */
bool MDPComp::isYUVDoable(hwc_context_t* ctx, hwc_layer_1_t* layer) {
    if(isSkipLayer(layer)) {
        ALOGD_IF(isDebug(), "%s: Video marked SKIP dpy %d", __FUNCTION__, mDpy);
        return false;
    }

    if(layer->transform & HWC_TRANSFORM_ROT_90 && !canUseRotator(ctx,mDpy)) {
        ALOGD_IF(isDebug(), "%s: no free DMA pipe",__FUNCTION__);
        return false;
    }

    if(isSecuring(ctx, layer)) {
        ALOGD_IF(isDebug(), "%s: MDP securing is active", __FUNCTION__);
        return false;
    }

    if(!isValidDimension(ctx, layer)) {
        ALOGD_IF(isDebug(), "%s: Buffer is of invalid width",
            __FUNCTION__);
        return false;
    }

    if(layer->planeAlpha < 0xFF) {
        ALOGD_IF(isDebug(), "%s: Cannot handle YUV layer with plane alpha\
                 in video only mode",
                 __FUNCTION__);
        return false;
    }

    return true;
}

/* Starts at fromIndex and checks each cached layer in the batch to see
 * whether it overlaps any updating layer above it in z-order, up to the end
 * of the batch. Returns false if any such intersection is found. */
bool MDPComp::canPushBatchToTop(const hwc_display_contents_1_t* list,
        int fromIndex, int toIndex) {
    for(int i = fromIndex; i < toIndex; i++) {
        if(mCurrentFrame.isFBComposed[i] && !mCurrentFrame.drop[i]) {
            if(intersectingUpdatingLayers(list, i+1, toIndex, i)) {
                return false;
            }
        }
    }
    return true;
}

/* Checks if the layer at targetLayerIndex intersects any of the updating
 * layers between fromIndex and toIndex. Returns true if an intersection is
 * found. */
bool MDPComp::intersectingUpdatingLayers(const hwc_display_contents_1_t* list,
        int fromIndex, int toIndex, int targetLayerIndex) {
    for(int i = fromIndex; i <= toIndex; i++) {
        if(!mCurrentFrame.isFBComposed[i]) {
            if(areLayersIntersecting(&list->hwLayers[i],
                        &list->hwLayers[targetLayerIndex])) {
                return true;
            }
        }
    }
    return false;
}

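/* Scans the layer list for the largest batch of cached (FB-composed) layers,
 * growing a batch past updating layers only when the cached layers do not
 * overlap them or the whole batch can safely be pushed above them in z-order.
 * Returns the z-order the framebuffer target should take, adjusted for any
 * dropped layers; the batch bounds come back through maxBatchStart,
 * maxBatchEnd and maxBatchCount. */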
int MDPComp::getBatch(hwc_display_contents_1_t* list,
        int& maxBatchStart, int& maxBatchEnd,
        int& maxBatchCount) {
    int i = 0;
    int fbZOrder = -1;
    int droppedLayerCt = 0;
    while (i < mCurrentFrame.layerCount) {
        int batchCount = 0;
        int batchStart = i;
        int batchEnd = i;
        /* Adjust batch Z order with the dropped layers so far */
        int fbZ = batchStart - droppedLayerCt;
        int firstZReverseIndex = -1;
        int updatingLayersAbove = 0;//Updating layer count in middle of batch
        while(i < mCurrentFrame.layerCount) {
            if(!mCurrentFrame.isFBComposed[i]) {
                if(!batchCount) {
                    i++;
                    break;
                }
                updatingLayersAbove++;
                i++;
                continue;
            } else {
                if(mCurrentFrame.drop[i]) {
                    i++;
                    droppedLayerCt++;
                    continue;
                } else if(updatingLayersAbove <= 0) {
                    batchCount++;
                    batchEnd = i;
                    i++;
                    continue;
                } else { //Layer is FBComposed, not a drop & updatingLayer > 0

                    // We have a valid updating layer already. If layer-i does
                    // not overlap with any updating layer between batch-start
                    // and i, then we can add layer i to the batch.
                    if(!intersectingUpdatingLayers(list, batchStart, i-1, i)) {
                        batchCount++;
                        batchEnd = i;
                        i++;
                        continue;
                    } else if(canPushBatchToTop(list, batchStart, i)) {
                        //If all the non-updating layers in this batch do not
                        //intersect with the updating layers above in z-order,
                        //then we can safely move the batch to a higher
                        //z-order. Increment fbZ as it is moving up.
                        if( firstZReverseIndex < 0) {
                            firstZReverseIndex = i;
                        }
                        batchCount++;
                        batchEnd = i;
                        fbZ += updatingLayersAbove;
                        i++;
                        updatingLayersAbove = 0;
                        continue;
                    } else {
                        //Both failed. Start the loop again from here.
                        if(firstZReverseIndex >= 0) {
                            i = firstZReverseIndex;
                        }
                        break;
                    }
                }
            }
        }
        if(batchCount > maxBatchCount) {
            maxBatchCount = batchCount;
            maxBatchStart = batchStart;
            maxBatchEnd = batchEnd;
            fbZOrder = fbZ;
        }
    }
    return fbZOrder;
}

bool MDPComp::markLayersForCaching(hwc_context_t* ctx,
        hwc_display_contents_1_t* list) {
    /* Idea is to keep as many non-updating(cached) layers in FB and
     * send rest of them through MDP. This is done in 2 steps.
     *   1. Find the maximum contiguous batch of non-updating layers.
     *   2. See if we can improve this batch size for caching by adding
     *      opaque layers around the batch, if they don't have
     *      any overlapping with the updating layers in between.
     * NEVER mark an updating layer for caching.
     * But cached ones can be marked for MDP */

    int maxBatchStart = -1;
    int maxBatchEnd = -1;
    int maxBatchCount = 0;
    int fbZ = -1;

    /* All or Nothing is cached. No batching needed */
    if(!mCurrentFrame.fbCount) {
        mCurrentFrame.fbZ = -1;
        return true;
    }
    if(!mCurrentFrame.mdpCount) {
        mCurrentFrame.fbZ = 0;
        return true;
    }

    fbZ = getBatch(list, maxBatchStart, maxBatchEnd, maxBatchCount);

    /* reset rest of the layers lying inside ROI for MDP comp */
    for(int i = 0; i < mCurrentFrame.layerCount; i++) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        if((i < maxBatchStart || i > maxBatchEnd) &&
                mCurrentFrame.isFBComposed[i]){
            if(!mCurrentFrame.drop[i]){
                //If an unsupported layer is being attempted to
                //be pulled out we should fail
                if(not isSupportedForMDPComp(ctx, layer)) {
                    return false;
                }
                mCurrentFrame.isFBComposed[i] = false;
            }
        }
    }

    // update the frame data
    mCurrentFrame.fbZ = fbZ;
    mCurrentFrame.fbCount = maxBatchCount;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount -
            mCurrentFrame.fbCount - mCurrentFrame.dropCount;

    ALOGD_IF(isDebug(),"%s: cached count: %d",__FUNCTION__,
            mCurrentFrame.fbCount);

    return true;
}

void MDPComp::updateLayerCache(hwc_context_t* ctx,
        hwc_display_contents_1_t* list) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    int fbCount = 0;

    for(int i = 0; i < numAppLayers; i++) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        if (mCachedFrame.hnd[i] == list->hwLayers[i].handle) {
            if(!mCurrentFrame.drop[i])
                fbCount++;
            mCurrentFrame.isFBComposed[i] = true;
        } else {
            mCurrentFrame.isFBComposed[i] = false;
        }
    }

    mCurrentFrame.fbCount = fbCount;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount
            - mCurrentFrame.dropCount;

    ALOGD_IF(isDebug(),"%s: MDP count: %d FB count %d drop count: %d"
            ,__FUNCTION__, mCurrentFrame.mdpCount, mCurrentFrame.fbCount,
            mCurrentFrame.dropCount);
}

void MDPComp::updateYUV(hwc_context_t* ctx, hwc_display_contents_1_t* list,
        bool secureOnly) {
    int nYuvCount = ctx->listStats[mDpy].yuvCount;
    for(int index = 0;index < nYuvCount; index++){
        int nYuvIndex = ctx->listStats[mDpy].yuvIndices[index];
        hwc_layer_1_t* layer = &list->hwLayers[nYuvIndex];

        if(!isYUVDoable(ctx, layer)) {
            if(!mCurrentFrame.isFBComposed[nYuvIndex]) {
                mCurrentFrame.isFBComposed[nYuvIndex] = true;
                mCurrentFrame.fbCount++;
            }
        } else {
            if(mCurrentFrame.isFBComposed[nYuvIndex]) {
                private_handle_t *hnd = (private_handle_t *)layer->handle;
                if(!secureOnly || isSecureBuffer(hnd)) {
                    mCurrentFrame.isFBComposed[nYuvIndex] = false;
                    mCurrentFrame.fbCount--;
                }
            }
        }
    }

    mCurrentFrame.mdpCount = mCurrentFrame.layerCount -
            mCurrentFrame.fbCount - mCurrentFrame.dropCount;
    ALOGD_IF(isDebug(),"%s: fb count: %d",__FUNCTION__,
             mCurrentFrame.fbCount);
}

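/* Common tail for all the strategies above: verify pipe and bandwidth
 * availability, check MDP/MDSS hardware limitations, configure the
 * framebuffer target (if any), build the layer<->MDP maps, and program each
 * MDP pipe. On failure the caller resets the frame and falls back to GLES. */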
bool MDPComp::postHeuristicsHandling(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {

    //Capability checks
    if(!resourceCheck(ctx, list)) {
        ALOGD_IF(isDebug(), "%s: resource check failed", __FUNCTION__);
        return false;
    }

    //Limitations checks
    if(!hwLimitationsCheck(ctx, list)) {
        ALOGD_IF(isDebug(), "%s: HW limitations",__FUNCTION__);
        return false;
    }

    //Configure framebuffer first if applicable
    if(mCurrentFrame.fbZ >= 0) {
        if(!ctx->mFBUpdate[mDpy]->prepare(ctx, list, mCurrentFrame.fbZ)) {
            ALOGD_IF(isDebug(), "%s configure framebuffer failed",
                    __FUNCTION__);
            return false;
        }
    }

    mCurrentFrame.map();

    if(!allocLayerPipes(ctx, list)) {
        ALOGD_IF(isDebug(), "%s: Unable to allocate MDP pipes", __FUNCTION__);
        return false;
    }

    for (int index = 0, mdpNextZOrder = 0; index < mCurrentFrame.layerCount;
            index++) {
        if(!mCurrentFrame.isFBComposed[index]) {
            int mdpIndex = mCurrentFrame.layerToMDP[index];
            hwc_layer_1_t* layer = &list->hwLayers[index];

            //Leave fbZ for framebuffer. CACHE/GLES layers go here.
            if(mdpNextZOrder == mCurrentFrame.fbZ) {
                mdpNextZOrder++;
            }
            MdpPipeInfo* cur_pipe = mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
            cur_pipe->zOrder = mdpNextZOrder++;

            private_handle_t *hnd = (private_handle_t *)layer->handle;
            if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
                if(configure4k2kYuv(ctx, layer,
                            mCurrentFrame.mdpToLayer[mdpIndex])
                        != 0 ){
                    ALOGD_IF(isDebug(), "%s: Failed to configure split pipes \
                            for layer %d",__FUNCTION__, index);
                    return false;
                }
                else{
                    mdpNextZOrder++;
                }
                continue;
            }
            if(configure(ctx, layer, mCurrentFrame.mdpToLayer[mdpIndex]) != 0 ){
                ALOGD_IF(isDebug(), "%s: Failed to configure overlay for \
                        layer %d",__FUNCTION__, index);
                return false;
            }
        }
    }

    setRedraw(ctx, list);
    return true;
}

Saurabh Shah173f4242013-11-20 09:50:12 -08001217bool MDPComp::resourceCheck(hwc_context_t *ctx,
1218 hwc_display_contents_1_t *list) {
1219 const bool fbUsed = mCurrentFrame.fbCount;
1220 if(mCurrentFrame.mdpCount > sMaxPipesPerMixer - fbUsed) {
1221 ALOGD_IF(isDebug(), "%s: Exceeds MAX_PIPES_PER_MIXER",__FUNCTION__);
1222 return false;
1223 }
1224
1225 if(!arePipesAvailable(ctx, list)) {
1226 return false;
1227 }
1228
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001229 double size = calcMDPBytesRead(ctx, list);
Saurabh Shah173f4242013-11-20 09:50:12 -08001230 if(!bandwidthCheck(ctx, size)) {
1231 ALOGD_IF(isDebug(), "%s: Exceeds bandwidth",__FUNCTION__);
1232 return false;
1233 }
1234
1235 return true;
1236}
1237
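/*
 * Estimates the data (in GB) that MDP reads to compose one frame: for each
 * MDP-composed layer, bytes-per-pixel times the source crop area, scaled by
 * (panel yres / destination height) to account for vertical scaling, plus
 * the framebuffer target buffer (RGBA8888, sized to its display frame) when
 * GPU composition is also used. Returns 0 when the target supplies no
 * bandwidth limit.
 */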
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001238double MDPComp::calcMDPBytesRead(hwc_context_t *ctx,
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001239 hwc_display_contents_1_t* list) {
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001240 double size = 0;
1241 const double GIG = 1000000000.0;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001242
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001243 //Skip for targets where no device tree value for bw is supplied
1244 if(sMaxBw <= 0.0) {
1245 return 0.0;
1246 }
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001247
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001248 for (uint32_t i = 0; i < list->numHwLayers - 1; i++) {
1249 if(!mCurrentFrame.isFBComposed[i]) {
1250 hwc_layer_1_t* layer = &list->hwLayers[i];
1251 private_handle_t *hnd = (private_handle_t *)layer->handle;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001252 if (hnd) {
Saurabh Shah62e1d732013-09-17 10:44:05 -07001253 hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
Saurabh Shah90789162013-09-16 10:29:20 -07001254 hwc_rect_t dst = layer->displayFrame;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001255 float bpp = ((float)hnd->size) / (hnd->width * hnd->height);
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001256 size += (bpp * (crop.right - crop.left) *
1257 (crop.bottom - crop.top) *
1258 ctx->dpyAttr[mDpy].yres / (dst.bottom - dst.top)) /
1259 GIG;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001260 }
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001261 }
1262 }
1263
1264 if(mCurrentFrame.fbCount) {
1265 hwc_layer_1_t* layer = &list->hwLayers[list->numHwLayers - 1];
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001266 int tempw, temph;
1267 size += (getBufferSizeAndDimensions(
1268 layer->displayFrame.right - layer->displayFrame.left,
1269 layer->displayFrame.bottom - layer->displayFrame.top,
1270 HAL_PIXEL_FORMAT_RGBA_8888,
1271 tempw, temph)) / GIG;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001272 }
1273
1274 return size;
1275}
1276
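/*
 * Checks that this display's read traffic fits in what is left of the MDP
 * bandwidth budget: size (GB per frame) * panel refresh rate (Hz) must not
 * exceed sMaxBw - sBwClaimed, both expressed in GBps. As an illustrative
 * figure (not taken from this code), a full-screen 1080p RGBA8888 layer is
 * about 8.3 MB per frame, i.e. roughly 0.5 GBps at 60 fps.
 */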
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001277bool MDPComp::bandwidthCheck(hwc_context_t *ctx, const double& size) {
1278 //Skip for targets where no device tree value for bw is supplied
1279 if(sMaxBw <= 0.0) {
1280 return true;
1281 }
1282
1283 double panelRefRate =
1284 1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;
1285 if((size * panelRefRate) > (sMaxBw - sBwClaimed)) {
1286 return false;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001287 }
1288 return true;
1289}
1290
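/*
 * Target-specific MDP/MDSS restrictions that are not covered by simple pipe
 * or bandwidth counts; returning false forces GPU composition.
 */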
Prabhanjan Kandula21918db2013-11-26 15:51:58 +05301291bool MDPComp::hwLimitationsCheck(hwc_context_t* ctx,
1292 hwc_display_contents_1_t* list) {
1293
1294 //A-family hw limitation:
1295    //If a layer needs alpha scaling, MDP cannot support it.
1296 if(ctx->mMDP.version < qdutils::MDSS_V5) {
1297 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1298 if(!mCurrentFrame.isFBComposed[i] &&
1299 isAlphaScaled( &list->hwLayers[i])) {
1300 ALOGD_IF(isDebug(), "%s:frame needs alphaScaling",__FUNCTION__);
1301 return false;
1302 }
1303 }
1304 }
1305
1306 // On 8x26 & 8974 hw, we have a limitation of downscaling+blending.
1307    //If multiple layers require downscaling and they also overlap,
1308    //fall back to GPU since MDSS cannot handle it.
1309 if(qdutils::MDPVersion::getInstance().is8x74v2() ||
1310 qdutils::MDPVersion::getInstance().is8x26()) {
1311 for(int i = 0; i < mCurrentFrame.layerCount-1; ++i) {
1312 hwc_layer_1_t* botLayer = &list->hwLayers[i];
1313 if(!mCurrentFrame.isFBComposed[i] &&
1314 isDownscaleRequired(botLayer)) {
1315 //if layer-i is marked for MDP and needs downscaling
1316 //check if any MDP layer on top of i & overlaps with layer-i
1317 for(int j = i+1; j < mCurrentFrame.layerCount; ++j) {
1318 hwc_layer_1_t* topLayer = &list->hwLayers[j];
1319 if(!mCurrentFrame.isFBComposed[j] &&
1320 isDownscaleRequired(topLayer)) {
1321 hwc_rect_t r = getIntersection(botLayer->displayFrame,
1322 topLayer->displayFrame);
1323 if(isValidRect(r))
1324 return false;
1325 }
1326 }
1327 }
1328 }
1329 }
1330 return true;
1331}
1332
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001333int MDPComp::prepare(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001334 int ret = 0;
Saurabh Shahaa236822013-04-24 18:07:26 -07001335 const int numLayers = ctx->listStats[mDpy].numAppLayers;
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001336 MDPVersion& mdpVersion = qdutils::MDPVersion::getInstance();
Ramkumar Radhakrishnanc5893f12013-06-06 19:43:53 -07001337
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001338    //If the number of app layers exceeds MAX_NUM_APP_LAYERS, fall back to GPU
1339    //and do not cache the information for the next draw cycle.
1340 if(numLayers > MAX_NUM_APP_LAYERS) {
1341 ALOGI("%s: Number of App layers exceeded the limit ",
1342 __FUNCTION__);
1343 mCachedFrame.reset();
1344 return -1;
1345 }
1346
Saurabh Shahb39f8152013-08-22 10:21:44 -07001347 //reset old data
1348 mCurrentFrame.reset(numLayers);
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001349 memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
1350 mCurrentFrame.dropCount = 0;
Prabhanjan Kandula088bd892013-07-02 23:47:13 +05301351
Ramkumar Radhakrishnana70981a2013-08-28 11:33:53 -07001352 // Detect the start of animation and fall back to GPU only once to cache
1353    // all the layers in FB and display FB content until animation completes.
1354 if(ctx->listStats[mDpy].isDisplayAnimating) {
1355 mCurrentFrame.needsRedraw = false;
1356 if(ctx->mAnimationState[mDpy] == ANIMATION_STOPPED) {
1357 mCurrentFrame.needsRedraw = true;
1358 ctx->mAnimationState[mDpy] = ANIMATION_STARTED;
1359 }
1360 setMDPCompLayerFlags(ctx, list);
1361 mCachedFrame.updateCounts(mCurrentFrame);
1362 ret = -1;
1363 return ret;
1364 } else {
1365 ctx->mAnimationState[mDpy] = ANIMATION_STOPPED;
1366 }
1367
Saurabh Shahb39f8152013-08-22 10:21:44 -07001368 //Hard conditions, if not met, cannot do MDP comp
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001369 if(isFrameDoable(ctx)) {
1370 generateROI(ctx, list);
Saurabh Shahb39f8152013-08-22 10:21:44 -07001371
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001372 //Convert from kbps to gbps
1373 sMaxBw = mdpVersion.getHighBw() / 1000000.0;
1374 if (ctx->mExtDisplay->isConnected() ||
1375 ctx->mMDP.panel != MIPI_CMD_PANEL) {
1376 sMaxBw = mdpVersion.getLowBw() / 1000000.0;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001377 }
1378
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001379 if(tryFullFrame(ctx, list) || tryVideoOnly(ctx, list)) {
1380 setMDPCompLayerFlags(ctx, list);
1381 } else {
1382 reset(ctx);
1383 memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
1384 mCurrentFrame.dropCount = 0;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001385 ret = -1;
Saurabh Shahb39f8152013-08-22 10:21:44 -07001386 }
1387 } else {
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001388 ALOGD_IF( isDebug(),"%s: MDP Comp not possible for this frame",
1389 __FUNCTION__);
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001390 ret = -1;
Saurabh Shahb39f8152013-08-22 10:21:44 -07001391 }
Saurabh Shahb39f8152013-08-22 10:21:44 -07001392
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001393 if(isDebug()) {
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001394 ALOGD("GEOMETRY change: %d",
1395 (list->flags & HWC_GEOMETRY_CHANGED));
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001396 android::String8 sDump("");
1397 dump(sDump);
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001398 ALOGD("%s",sDump.string());
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001399 }
1400
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001401 mCachedFrame.cacheAll(list);
1402 mCachedFrame.updateCounts(mCurrentFrame);
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001403 double panelRefRate =
1404 1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;
1405 sBwClaimed += calcMDPBytesRead(ctx, list) * panelRefRate;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001406 return ret;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001407}
1408
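/*
 * Reserves two VG pipes from the default mixer for a 4kx2k YUV layer that
 * will be source-split, one pipe for each half.
 */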
radhakrishnac9a67412013-09-25 17:40:42 +05301409bool MDPComp::allocSplitVGPipesfor4k2k(hwc_context_t *ctx,
1410 hwc_display_contents_1_t* list, int index) {
1411
1412 bool bRet = true;
1413 hwc_layer_1_t* layer = &list->hwLayers[index];
1414 private_handle_t *hnd = (private_handle_t *)layer->handle;
1415 int mdpIndex = mCurrentFrame.layerToMDP[index];
1416 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
1417 info.pipeInfo = new MdpYUVPipeInfo;
1418 info.rot = NULL;
1419 MdpYUVPipeInfo& pipe_info = *(MdpYUVPipeInfo*)info.pipeInfo;
1420 ePipeType type = MDPCOMP_OV_VG;
1421
1422 pipe_info.lIndex = ovutils::OV_INVALID;
1423 pipe_info.rIndex = ovutils::OV_INVALID;
1424
1425 pipe_info.lIndex = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
1426 if(pipe_info.lIndex == ovutils::OV_INVALID){
1427 bRet = false;
1428 ALOGD_IF(isDebug(),"%s: allocating first VG pipe failed",
1429 __FUNCTION__);
1430 }
1431 pipe_info.rIndex = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
1432 if(pipe_info.rIndex == ovutils::OV_INVALID){
1433 bRet = false;
1434 ALOGD_IF(isDebug(),"%s: allocating second VG pipe failed",
1435 __FUNCTION__);
1436 }
1437 return bRet;
1438}
Saurabh Shah88e4d272013-09-03 13:31:29 -07001439//=============MDPCompNonSplit===================================================
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001440
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001441void MDPCompNonSplit::adjustForSourceSplit(hwc_context_t *ctx,
radhakrishnac9a67412013-09-25 17:40:42 +05301442 hwc_display_contents_1_t* list){
1443    //As we split a 4kx2k YUV layer and program it to 2 VG pipes
1444    //(if available), increase the mdp count accordingly
1445 mCurrentFrame.mdpCount += ctx->listStats[mDpy].yuv4k2kCount;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001446
1447    //If a 4k2k YUV layer split is possible, and the FB z-order is
1448    //above the 4k2k layer, increment the FB z-order by 1, since
1449    //the right half of the split layer takes the next z-order
1450    //slot
1451 if(mCurrentFrame.fbZ >= 0) {
1452 int n4k2kYuvCount = ctx->listStats[mDpy].yuv4k2kCount;
1453 for(int index = 0; index < n4k2kYuvCount; index++){
1454 int n4k2kYuvIndex =
1455 ctx->listStats[mDpy].yuv4k2kIndices[index];
1456 if(mCurrentFrame.fbZ > n4k2kYuvIndex){
1457 mCurrentFrame.fbZ += 1;
1458 }
1459 }
1460 }
radhakrishnac9a67412013-09-25 17:40:42 +05301461}
1462
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001463/*
1464 * Configures pipe(s) for MDP composition
1465 */
Saurabh Shah88e4d272013-09-03 13:31:29 -07001466int MDPCompNonSplit::configure(hwc_context_t *ctx, hwc_layer_1_t *layer,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001467 PipeLayerPair& PipeLayerPair) {
Saurabh Shah88e4d272013-09-03 13:31:29 -07001468 MdpPipeInfoNonSplit& mdp_info =
1469 *(static_cast<MdpPipeInfoNonSplit*>(PipeLayerPair.pipeInfo));
Saurabh Shahacf10202013-02-26 10:15:15 -08001470 eMdpFlags mdpFlags = OV_MDP_BACKEND_COMPOSITION;
1471 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1472 eIsFg isFg = IS_FG_OFF;
1473 eDest dest = mdp_info.index;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001474
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001475 ALOGD_IF(isDebug(),"%s: configuring: layer: %p z_order: %d dest_pipe: %d",
1476 __FUNCTION__, layer, zOrder, dest);
1477
Saurabh Shah88e4d272013-09-03 13:31:29 -07001478 return configureNonSplit(ctx, layer, mDpy, mdpFlags, zOrder, isFg, dest,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001479 &PipeLayerPair.rot);
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001480}
1481
Saurabh Shah88e4d272013-09-03 13:31:29 -07001482bool MDPCompNonSplit::arePipesAvailable(hwc_context_t *ctx,
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001483 hwc_display_contents_1_t* list) {
1484 overlay::Overlay& ov = *ctx->mOverlay;
1485 int numPipesNeeded = mCurrentFrame.mdpCount;
1486 int availPipes = ov.availablePipes(mDpy, Overlay::MIXER_DEFAULT);
1487
1488 //Reserve pipe for FB
1489 if(mCurrentFrame.fbCount)
1490 availPipes -= 1;
1491
1492 if(numPipesNeeded > availPipes) {
1493 ALOGD_IF(isDebug(), "%s: Insufficient pipes, dpy %d needed %d, avail %d",
1494 __FUNCTION__, mDpy, numPipesNeeded, availPipes);
1495 return false;
1496 }
1497
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001498 if(not areVGPipesAvailable(ctx, list)) {
1499 return false;
1500 }
1501
1502 return true;
1503}
1504
1505bool MDPCompNonSplit::areVGPipesAvailable(hwc_context_t *ctx,
1506 hwc_display_contents_1_t* list) {
1507 overlay::Overlay& ov = *ctx->mOverlay;
1508 int pipesNeeded = 0;
1509 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1510 if(!mCurrentFrame.isFBComposed[i]) {
1511 hwc_layer_1_t* layer = &list->hwLayers[i];
1512 hwc_rect_t dst = layer->displayFrame;
1513 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301514 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1515 pipesNeeded = pipesNeeded + 2;
1516 }
1517 else if(isYuvBuffer(hnd)) {
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001518 pipesNeeded++;
1519 }
1520 }
1521 }
1522
1523 int availableVGPipes = ov.availablePipes(mDpy, ovutils::OV_MDP_PIPE_VG);
1524 if(pipesNeeded > availableVGPipes) {
1525        ALOGD_IF(isDebug(), "%s: Insufficient VG pipes for video layers "
1526 "dpy %d needed %d, avail %d",
1527 __FUNCTION__, mDpy, pipesNeeded, availableVGPipes);
1528 return false;
1529 }
1530
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001531 return true;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001532}
1533
Saurabh Shah88e4d272013-09-03 13:31:29 -07001534bool MDPCompNonSplit::allocLayerPipes(hwc_context_t *ctx,
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001535 hwc_display_contents_1_t* list) {
1536 for(int index = 0; index < mCurrentFrame.layerCount; index++) {
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001537
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001538 if(mCurrentFrame.isFBComposed[index]) continue;
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001539
Jeykumar Sankarancf537002013-01-21 21:19:15 -08001540 hwc_layer_1_t* layer = &list->hwLayers[index];
1541 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301542 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1543 if(allocSplitVGPipesfor4k2k(ctx, list, index)){
1544 continue;
1545 }
1546 }
1547
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001548 int mdpIndex = mCurrentFrame.layerToMDP[index];
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001549 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
Saurabh Shah88e4d272013-09-03 13:31:29 -07001550 info.pipeInfo = new MdpPipeInfoNonSplit;
Saurabh Shahacf10202013-02-26 10:15:15 -08001551 info.rot = NULL;
Saurabh Shah88e4d272013-09-03 13:31:29 -07001552 MdpPipeInfoNonSplit& pipe_info = *(MdpPipeInfoNonSplit*)info.pipeInfo;
Jeykumar Sankarana37fdbf2013-03-06 18:59:28 -08001553 ePipeType type = MDPCOMP_OV_ANY;
1554
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001555 if(isYuvBuffer(hnd)) {
1556 type = MDPCOMP_OV_VG;
Prabhanjan Kandula21918db2013-11-26 15:51:58 +05301557 } else if(!qhwc::needsScaling(layer)
Saurabh Shah85234ec2013-04-12 17:09:00 -07001558 && Overlay::getDMAMode() != Overlay::DMA_BLOCK_MODE
1559 && ctx->mMDP.version >= qdutils::MDSS_V5) {
Jeykumar Sankarana37fdbf2013-03-06 18:59:28 -08001560 type = MDPCOMP_OV_DMA;
1561 }
1562
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001563 pipe_info.index = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001564 if(pipe_info.index == ovutils::OV_INVALID) {
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001565 ALOGD_IF(isDebug(), "%s: Unable to get pipe type = %d",
1566 __FUNCTION__, (int) type);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001567 return false;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001568 }
1569 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001570 return true;
1571}
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001572
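/*
 * Configures the pair of VG pipes reserved for a 4kx2k YUV layer; the layer
 * is source-split so that each pipe handles one half.
 */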
radhakrishnac9a67412013-09-25 17:40:42 +05301573int MDPCompNonSplit::configure4k2kYuv(hwc_context_t *ctx, hwc_layer_1_t *layer,
1574 PipeLayerPair& PipeLayerPair) {
1575 MdpYUVPipeInfo& mdp_info =
1576 *(static_cast<MdpYUVPipeInfo*>(PipeLayerPair.pipeInfo));
1577 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1578 eIsFg isFg = IS_FG_OFF;
1579 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1580 eDest lDest = mdp_info.lIndex;
1581 eDest rDest = mdp_info.rIndex;
1582
1583 return configureSourceSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg,
1584 lDest, rDest, &PipeLayerPair.rot);
1585}
1586
Saurabh Shah88e4d272013-09-03 13:31:29 -07001587bool MDPCompNonSplit::draw(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001588
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001589 if(!isEnabled()) {
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001590 ALOGD_IF(isDebug(),"%s: MDP Comp not configured", __FUNCTION__);
1591 return true;
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -08001592 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001593
1594 if(!ctx || !list) {
1595        ALOGE("%s: invalid context or list",__FUNCTION__);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001596 return false;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001597 }
1598
Prabhanjan Kandula08222fc2013-07-10 17:20:59 +05301599 if(ctx->listStats[mDpy].numAppLayers > MAX_NUM_APP_LAYERS) {
1600 ALOGD_IF(isDebug(),"%s: Exceeding max layer count", __FUNCTION__);
1601 return true;
1602 }
1603
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001604 /* reset Invalidator */
Saurabh Shah2d998a92013-05-14 17:55:58 -07001605 if(idleInvalidator && !sIdleFallBack && mCurrentFrame.mdpCount)
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001606 idleInvalidator->markForSleep();
1607
1608 overlay::Overlay& ov = *ctx->mOverlay;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001609 LayerProp *layerProp = ctx->layerProp[mDpy];
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001610
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001611 int numHwLayers = ctx->listStats[mDpy].numAppLayers;
1612 for(int i = 0; i < numHwLayers && mCurrentFrame.mdpCount; i++ )
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001613 {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001614 if(mCurrentFrame.isFBComposed[i]) continue;
1615
Naseer Ahmed5b6708a2012-08-02 13:46:08 -07001616 hwc_layer_1_t *layer = &list->hwLayers[i];
Saurabh Shahacf10202013-02-26 10:15:15 -08001617 private_handle_t *hnd = (private_handle_t *)layer->handle;
1618 if(!hnd) {
Sushil Chauhan897a9c32013-07-18 11:09:55 -07001619 if (!(layer->flags & HWC_COLOR_FILL)) {
1620 ALOGE("%s handle null", __FUNCTION__);
1621 return false;
1622 }
1623 // No PLAY for Color layer
1624 layerProp[i].mFlags &= ~HWC_MDPCOMP;
1625 continue;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001626 }
1627
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001628 int mdpIndex = mCurrentFrame.layerToMDP[i];
1629
radhakrishnac9a67412013-09-25 17:40:42 +05301630 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit)
1631 {
1632 MdpYUVPipeInfo& pipe_info =
1633 *(MdpYUVPipeInfo*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
1634 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
1635 ovutils::eDest indexL = pipe_info.lIndex;
1636 ovutils::eDest indexR = pipe_info.rIndex;
1637 int fd = hnd->fd;
1638 uint32_t offset = hnd->offset;
1639 if(rot) {
1640 rot->queueBuffer(fd, offset);
1641 fd = rot->getDstMemId();
1642 offset = rot->getDstOffset();
1643 }
1644 if(indexL != ovutils::OV_INVALID) {
1645 ovutils::eDest destL = (ovutils::eDest)indexL;
1646 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1647 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
1648 if (!ov.queueBuffer(fd, offset, destL)) {
1649 ALOGE("%s: queueBuffer failed for display:%d",
1650 __FUNCTION__, mDpy);
1651 return false;
1652 }
1653 }
1654
1655 if(indexR != ovutils::OV_INVALID) {
1656 ovutils::eDest destR = (ovutils::eDest)indexR;
1657 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1658 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
1659 if (!ov.queueBuffer(fd, offset, destR)) {
1660 ALOGE("%s: queueBuffer failed for display:%d",
1661 __FUNCTION__, mDpy);
1662 return false;
1663 }
1664 }
1665 }
1666 else{
1667 MdpPipeInfoNonSplit& pipe_info =
Saurabh Shah88e4d272013-09-03 13:31:29 -07001668 *(MdpPipeInfoNonSplit*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
radhakrishnac9a67412013-09-25 17:40:42 +05301669 ovutils::eDest dest = pipe_info.index;
1670 if(dest == ovutils::OV_INVALID) {
1671 ALOGE("%s: Invalid pipe index (%d)", __FUNCTION__, dest);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001672 return false;
radhakrishnac9a67412013-09-25 17:40:42 +05301673 }
Saurabh Shahacf10202013-02-26 10:15:15 -08001674
radhakrishnac9a67412013-09-25 17:40:42 +05301675 if(!(layerProp[i].mFlags & HWC_MDPCOMP)) {
1676 continue;
1677 }
1678
1679 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1680 using pipe: %d", __FUNCTION__, layer,
1681 hnd, dest );
1682
1683 int fd = hnd->fd;
1684 uint32_t offset = hnd->offset;
1685
1686 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
1687 if(rot) {
1688 if(!rot->queueBuffer(fd, offset))
1689 return false;
1690 fd = rot->getDstMemId();
1691 offset = rot->getDstOffset();
1692 }
1693
1694 if (!ov.queueBuffer(fd, offset, dest)) {
1695 ALOGE("%s: queueBuffer failed for display:%d ",
1696 __FUNCTION__, mDpy);
1697 return false;
1698 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001699 }
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001700
1701 layerProp[i].mFlags &= ~HWC_MDPCOMP;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001702 }
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001703 return true;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001704}
1705
Saurabh Shah88e4d272013-09-03 13:31:29 -07001706//=============MDPCompSplit===================================================
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001707
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001708void MDPCompSplit::adjustForSourceSplit(hwc_context_t *ctx,
radhakrishnac9a67412013-09-25 17:40:42 +05301709 hwc_display_contents_1_t* list){
1710    //If a 4kx2k YUV layer is present entirely in either the left half
1711    //or the right half, try splitting the YUV layer to avoid decimation
1712 int n4k2kYuvCount = ctx->listStats[mDpy].yuv4k2kCount;
1713 const int lSplit = getLeftSplit(ctx, mDpy);
1714 for(int index = 0; index < n4k2kYuvCount; index++){
1715 int n4k2kYuvIndex = ctx->listStats[mDpy].yuv4k2kIndices[index];
1716 hwc_layer_1_t* layer = &list->hwLayers[n4k2kYuvIndex];
1717 hwc_rect_t dst = layer->displayFrame;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001718 if((dst.left > lSplit) || (dst.right < lSplit)) {
radhakrishnac9a67412013-09-25 17:40:42 +05301719 mCurrentFrame.mdpCount += 1;
1720 }
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001721 if(mCurrentFrame.fbZ > n4k2kYuvIndex){
1722 mCurrentFrame.fbZ += 1;
1723 }
radhakrishnac9a67412013-09-25 17:40:42 +05301724 }
1725}
1726
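/*
 * Counts the pipes a given mixer needs: a layer contributes to the left
 * mixer if its destination starts left of the split point and to the right
 * mixer if it extends past it, so a layer spanning the split needs one pipe
 * on each side.
 */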
Saurabh Shah88e4d272013-09-03 13:31:29 -07001727int MDPCompSplit::pipesNeeded(hwc_context_t *ctx,
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001728 hwc_display_contents_1_t* list,
1729 int mixer) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001730 int pipesNeeded = 0;
Saurabh Shah67a38c32013-06-10 16:23:15 -07001731 const int xres = ctx->dpyAttr[mDpy].xres;
Saurabh Shah07a8ca82013-08-06 18:45:42 -07001732
1733 const int lSplit = getLeftSplit(ctx, mDpy);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001734
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001735 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1736 if(!mCurrentFrame.isFBComposed[i]) {
1737 hwc_layer_1_t* layer = &list->hwLayers[i];
1738 hwc_rect_t dst = layer->displayFrame;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001739 if(mixer == Overlay::MIXER_LEFT && dst.left < lSplit) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001740 pipesNeeded++;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001741 } else if(mixer == Overlay::MIXER_RIGHT && dst.right > lSplit) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001742 pipesNeeded++;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001743 }
1744 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001745 }
1746 return pipesNeeded;
1747}
1748
Saurabh Shah88e4d272013-09-03 13:31:29 -07001749bool MDPCompSplit::arePipesAvailable(hwc_context_t *ctx,
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001750 hwc_display_contents_1_t* list) {
1751 overlay::Overlay& ov = *ctx->mOverlay;
Saurabh Shah082468e2013-09-12 10:05:32 -07001752 int totalPipesNeeded = 0;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001753
1754 for(int i = 0; i < Overlay::MIXER_MAX; i++) {
1755 int numPipesNeeded = pipesNeeded(ctx, list, i);
1756 int availPipes = ov.availablePipes(mDpy, i);
1757
1758        //Reserve pipe(s) for FB
1759 if(mCurrentFrame.fbCount)
Saurabh Shah082468e2013-09-12 10:05:32 -07001760 numPipesNeeded += 1;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001761
Saurabh Shah082468e2013-09-12 10:05:32 -07001762 totalPipesNeeded += numPipesNeeded;
1763
1764 //Per mixer check.
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001765 if(numPipesNeeded > availPipes) {
1766 ALOGD_IF(isDebug(), "%s: Insufficient pipes for "
1767 "dpy %d mixer %d needed %d, avail %d",
1768 __FUNCTION__, mDpy, i, numPipesNeeded, availPipes);
1769 return false;
1770 }
1771 }
Saurabh Shah082468e2013-09-12 10:05:32 -07001772
1773 //Per display check, since unused pipes can get counted twice.
1774 int totalPipesAvailable = ov.availablePipes(mDpy);
1775 if(totalPipesNeeded > totalPipesAvailable) {
1776 ALOGD_IF(isDebug(), "%s: Insufficient pipes for "
1777 "dpy %d needed %d, avail %d",
1778 __FUNCTION__, mDpy, totalPipesNeeded, totalPipesAvailable);
1779 return false;
1780 }
1781
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001782 if(not areVGPipesAvailable(ctx, list)) {
1783 return false;
1784 }
1785
1786 return true;
1787}
1788
1789bool MDPCompSplit::areVGPipesAvailable(hwc_context_t *ctx,
1790 hwc_display_contents_1_t* list) {
1791 overlay::Overlay& ov = *ctx->mOverlay;
1792 int pipesNeeded = 0;
1793 const int lSplit = getLeftSplit(ctx, mDpy);
1794 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1795 if(!mCurrentFrame.isFBComposed[i]) {
1796 hwc_layer_1_t* layer = &list->hwLayers[i];
1797 hwc_rect_t dst = layer->displayFrame;
1798 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301799 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1800 if((dst.left > lSplit)||(dst.right < lSplit)){
1801 pipesNeeded = pipesNeeded + 2;
1802 continue;
1803 }
1804 }
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001805 if(isYuvBuffer(hnd)) {
1806 if(dst.left < lSplit) {
1807 pipesNeeded++;
1808 }
1809 if(dst.right > lSplit) {
1810 pipesNeeded++;
1811 }
1812 }
1813 }
1814 }
1815
1816 int availableVGPipes = ov.availablePipes(mDpy, ovutils::OV_MDP_PIPE_VG);
1817 if(pipesNeeded > availableVGPipes) {
1818        ALOGD_IF(isDebug(), "%s: Insufficient VG pipes for video layers "
1819 "dpy %d needed %d, avail %d",
1820 __FUNCTION__, mDpy, pipesNeeded, availableVGPipes);
1821 return false;
1822 }
1823
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001824 return true;
1825}
1826
Saurabh Shah88e4d272013-09-03 13:31:29 -07001827bool MDPCompSplit::acquireMDPPipes(hwc_context_t *ctx, hwc_layer_1_t* layer,
1828 MdpPipeInfoSplit& pipe_info,
Saurabh Shah67a38c32013-06-10 16:23:15 -07001829 ePipeType type) {
1830 const int xres = ctx->dpyAttr[mDpy].xres;
Saurabh Shah07a8ca82013-08-06 18:45:42 -07001831 const int lSplit = getLeftSplit(ctx, mDpy);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001832
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001833 hwc_rect_t dst = layer->displayFrame;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001834 pipe_info.lIndex = ovutils::OV_INVALID;
1835 pipe_info.rIndex = ovutils::OV_INVALID;
1836
1837 if (dst.left < lSplit) {
1838 pipe_info.lIndex = getMdpPipe(ctx, type, Overlay::MIXER_LEFT);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001839 if(pipe_info.lIndex == ovutils::OV_INVALID)
1840 return false;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001841 }
1842
1843 if(dst.right > lSplit) {
1844 pipe_info.rIndex = getMdpPipe(ctx, type, Overlay::MIXER_RIGHT);
1845 if(pipe_info.rIndex == ovutils::OV_INVALID)
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001846 return false;
1847 }
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001848
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001849 return true;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001850}
1851
Saurabh Shah88e4d272013-09-03 13:31:29 -07001852bool MDPCompSplit::allocLayerPipes(hwc_context_t *ctx,
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001853 hwc_display_contents_1_t* list) {
1854 for(int index = 0 ; index < mCurrentFrame.layerCount; index++) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001855
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001856 if(mCurrentFrame.isFBComposed[index]) continue;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001857
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001858 hwc_layer_1_t* layer = &list->hwLayers[index];
1859 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301860 hwc_rect_t dst = layer->displayFrame;
1861 const int lSplit = getLeftSplit(ctx, mDpy);
1862 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1863 if((dst.left > lSplit)||(dst.right < lSplit)){
1864 if(allocSplitVGPipesfor4k2k(ctx, list, index)){
1865 continue;
1866 }
1867 }
1868 }
Saurabh Shah0d65dbe2013-06-06 18:33:16 -07001869 int mdpIndex = mCurrentFrame.layerToMDP[index];
1870 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
Saurabh Shah88e4d272013-09-03 13:31:29 -07001871 info.pipeInfo = new MdpPipeInfoSplit;
Saurabh Shah9e3adb22013-03-26 11:16:27 -07001872 info.rot = NULL;
Saurabh Shah88e4d272013-09-03 13:31:29 -07001873 MdpPipeInfoSplit& pipe_info = *(MdpPipeInfoSplit*)info.pipeInfo;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001874 ePipeType type = MDPCOMP_OV_ANY;
1875
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001876 if(isYuvBuffer(hnd)) {
1877 type = MDPCOMP_OV_VG;
Sushil Chauhan15a2ea62013-09-04 18:28:36 -07001878 } else if(!qhwc::needsScalingWithSplit(ctx, layer, mDpy)
Saurabh Shah85234ec2013-04-12 17:09:00 -07001879 && Overlay::getDMAMode() != Overlay::DMA_BLOCK_MODE
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001880 && ctx->mMDP.version >= qdutils::MDSS_V5) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001881 type = MDPCOMP_OV_DMA;
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001882 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001883
1884 if(!acquireMDPPipes(ctx, layer, pipe_info, type)) {
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001885 ALOGD_IF(isDebug(), "%s: Unable to get pipe for type = %d",
1886 __FUNCTION__, (int) type);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001887 return false;
1888 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001889 }
1890 return true;
1891}
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001892
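/*
 * If the 4kx2k YUV layer lies entirely on one half of the split display, it
 * is source-split across the two VG pipes reserved for it; otherwise the
 * regular split configure() path is used.
 */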
radhakrishnac9a67412013-09-25 17:40:42 +05301893int MDPCompSplit::configure4k2kYuv(hwc_context_t *ctx, hwc_layer_1_t *layer,
1894 PipeLayerPair& PipeLayerPair) {
1895 const int lSplit = getLeftSplit(ctx, mDpy);
1896 hwc_rect_t dst = layer->displayFrame;
1897 if((dst.left > lSplit)||(dst.right < lSplit)){
1898 MdpYUVPipeInfo& mdp_info =
1899 *(static_cast<MdpYUVPipeInfo*>(PipeLayerPair.pipeInfo));
1900 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1901 eIsFg isFg = IS_FG_OFF;
1902 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1903 eDest lDest = mdp_info.lIndex;
1904 eDest rDest = mdp_info.rIndex;
1905
1906 return configureSourceSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg,
1907 lDest, rDest, &PipeLayerPair.rot);
1908 }
1909 else{
1910 return configure(ctx, layer, PipeLayerPair);
1911 }
1912}
1913
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001914/*
1915 * Configures pipe(s) for MDP composition
1916 */
Saurabh Shah88e4d272013-09-03 13:31:29 -07001917int MDPCompSplit::configure(hwc_context_t *ctx, hwc_layer_1_t *layer,
Saurabh Shah67a38c32013-06-10 16:23:15 -07001918 PipeLayerPair& PipeLayerPair) {
Saurabh Shah88e4d272013-09-03 13:31:29 -07001919 MdpPipeInfoSplit& mdp_info =
1920 *(static_cast<MdpPipeInfoSplit*>(PipeLayerPair.pipeInfo));
Saurabh Shahacf10202013-02-26 10:15:15 -08001921 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1922 eIsFg isFg = IS_FG_OFF;
1923 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1924 eDest lDest = mdp_info.lIndex;
1925 eDest rDest = mdp_info.rIndex;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001926
1927 ALOGD_IF(isDebug(),"%s: configuring: layer: %p z_order: %d dest_pipeL: %d"
1928 "dest_pipeR: %d",__FUNCTION__, layer, zOrder, lDest, rDest);
1929
Saurabh Shah88e4d272013-09-03 13:31:29 -07001930 return configureSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg, lDest,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001931 rDest, &PipeLayerPair.rot);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001932}
1933
Saurabh Shah88e4d272013-09-03 13:31:29 -07001934bool MDPCompSplit::draw(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001935
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001936 if(!isEnabled()) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001937 ALOGD_IF(isDebug(),"%s: MDP Comp not configured", __FUNCTION__);
1938 return true;
1939 }
1940
1941 if(!ctx || !list) {
1942        ALOGE("%s: invalid context or list",__FUNCTION__);
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001943 return false;
1944 }
1945
Prabhanjan Kandula08222fc2013-07-10 17:20:59 +05301946 if(ctx->listStats[mDpy].numAppLayers > MAX_NUM_APP_LAYERS) {
1947 ALOGD_IF(isDebug(),"%s: Exceeding max layer count", __FUNCTION__);
1948 return true;
1949 }
1950
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001951 /* reset Invalidator */
Saurabh Shah2d998a92013-05-14 17:55:58 -07001952 if(idleInvalidator && !sIdleFallBack && mCurrentFrame.mdpCount)
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001953 idleInvalidator->markForSleep();
1954
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001955 overlay::Overlay& ov = *ctx->mOverlay;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001956 LayerProp *layerProp = ctx->layerProp[mDpy];
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001957
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001958 int numHwLayers = ctx->listStats[mDpy].numAppLayers;
1959 for(int i = 0; i < numHwLayers && mCurrentFrame.mdpCount; i++ )
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001960 {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001961 if(mCurrentFrame.isFBComposed[i]) continue;
1962
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001963 hwc_layer_1_t *layer = &list->hwLayers[i];
Saurabh Shahacf10202013-02-26 10:15:15 -08001964 private_handle_t *hnd = (private_handle_t *)layer->handle;
1965 if(!hnd) {
1966 ALOGE("%s handle null", __FUNCTION__);
1967 return false;
1968 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001969
1970 if(!(layerProp[i].mFlags & HWC_MDPCOMP)) {
1971 continue;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001972 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001973
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001974 int mdpIndex = mCurrentFrame.layerToMDP[i];
1975
radhakrishnac9a67412013-09-25 17:40:42 +05301976 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit)
1977 {
1978 MdpYUVPipeInfo& pipe_info =
1979 *(MdpYUVPipeInfo*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
1980 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
1981 ovutils::eDest indexL = pipe_info.lIndex;
1982 ovutils::eDest indexR = pipe_info.rIndex;
1983 int fd = hnd->fd;
1984 uint32_t offset = hnd->offset;
1985 if(rot) {
1986 rot->queueBuffer(fd, offset);
1987 fd = rot->getDstMemId();
1988 offset = rot->getDstOffset();
1989 }
1990 if(indexL != ovutils::OV_INVALID) {
1991 ovutils::eDest destL = (ovutils::eDest)indexL;
1992 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1993 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
1994 if (!ov.queueBuffer(fd, offset, destL)) {
1995 ALOGE("%s: queueBuffer failed for display:%d",
1996 __FUNCTION__, mDpy);
1997 return false;
1998 }
1999 }
Saurabh Shahacf10202013-02-26 10:15:15 -08002000
radhakrishnac9a67412013-09-25 17:40:42 +05302001 if(indexR != ovutils::OV_INVALID) {
2002 ovutils::eDest destR = (ovutils::eDest)indexR;
2003 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
2004 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
2005 if (!ov.queueBuffer(fd, offset, destR)) {
2006 ALOGE("%s: queueBuffer failed for display:%d",
2007 __FUNCTION__, mDpy);
2008 return false;
2009 }
Saurabh Shaha9da08f2013-07-03 13:27:53 -07002010 }
2011 }
radhakrishnac9a67412013-09-25 17:40:42 +05302012 else{
2013 MdpPipeInfoSplit& pipe_info =
2014 *(MdpPipeInfoSplit*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
2015 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
Saurabh Shaha9da08f2013-07-03 13:27:53 -07002016
radhakrishnac9a67412013-09-25 17:40:42 +05302017 ovutils::eDest indexL = pipe_info.lIndex;
2018 ovutils::eDest indexR = pipe_info.rIndex;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002019
radhakrishnac9a67412013-09-25 17:40:42 +05302020 int fd = hnd->fd;
2021 int offset = hnd->offset;
2022
2023 if(ctx->mAD->isModeOn()) {
2024 if(ctx->mAD->draw(ctx, fd, offset)) {
2025 fd = ctx->mAD->getDstFd(ctx);
2026 offset = ctx->mAD->getDstOffset(ctx);
2027 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002028 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002029
radhakrishnac9a67412013-09-25 17:40:42 +05302030 if(rot) {
2031 rot->queueBuffer(fd, offset);
2032 fd = rot->getDstMemId();
2033 offset = rot->getDstOffset();
2034 }
2035
2036 //************* play left mixer **********
2037 if(indexL != ovutils::OV_INVALID) {
2038 ovutils::eDest destL = (ovutils::eDest)indexL;
2039 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
2040 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
2041 if (!ov.queueBuffer(fd, offset, destL)) {
2042 ALOGE("%s: queueBuffer failed for left mixer",
2043 __FUNCTION__);
2044 return false;
2045 }
2046 }
2047
2048 //************* play right mixer **********
2049 if(indexR != ovutils::OV_INVALID) {
2050 ovutils::eDest destR = (ovutils::eDest)indexR;
2051 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
2052 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
2053 if (!ov.queueBuffer(fd, offset, destR)) {
2054 ALOGE("%s: queueBuffer failed for right mixer",
2055 __FUNCTION__);
2056 return false;
2057 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002058 }
2059 }
Saurabh Shahacf10202013-02-26 10:15:15 -08002060
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002061 layerProp[i].mFlags &= ~HWC_MDPCOMP;
2062 }
Saurabh Shahacf10202013-02-26 10:15:15 -08002063
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002064 return true;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002065}
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002066}; //namespace
2067