/*
 * Copyright (C) 2012-2013, The Linux Foundation. All rights reserved.
 * Not a Contribution, Apache license notifications and license are retained
 * for attribution purposes only.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <math.h>
#include "hwc_mdpcomp.h"
#include <sys/ioctl.h>
#include "external.h"
#include "virtual.h"
#include "qdMetaData.h"
#include "mdp_version.h"
#include "hwc_fbupdate.h"
#include "hwc_ad.h"
#include <overlayRotator.h>

using namespace overlay;
using namespace qdutils;
using namespace overlay::utils;
namespace ovutils = overlay::utils;

namespace qhwc {

//==============MDPComp========================================================

IdleInvalidator *MDPComp::idleInvalidator = NULL;
bool MDPComp::sIdleFallBack = false;
bool MDPComp::sDebugLogs = false;
bool MDPComp::sEnabled = false;
bool MDPComp::sEnableMixedMode = true;
bool MDPComp::sEnablePartialFrameUpdate = false;
int MDPComp::sMaxPipesPerMixer = MAX_PIPES_PER_MIXER;
double MDPComp::sMaxBw = 0.0;
double MDPComp::sBwClaimed = 0.0;
bool MDPComp::sEnable4k2kYUVSplit = false;

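// Factory: returns a split or non-split MDP composition object depending on
// whether the target display is driven by two mixers.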
MDPComp* MDPComp::getObject(hwc_context_t *ctx, const int& dpy) {
    if(isDisplaySplit(ctx, dpy)) {
        return new MDPCompSplit(dpy);
    }
    return new MDPCompNonSplit(dpy);
}

MDPComp::MDPComp(int dpy):mDpy(dpy){};

void MDPComp::dump(android::String8& buf)
{
    if(mCurrentFrame.layerCount > MAX_NUM_APP_LAYERS)
        return;

    dumpsys_log(buf,"HWC Map for Dpy: %s \n",
                (mDpy == 0) ? "\"PRIMARY\"" :
                (mDpy == 1) ? "\"EXTERNAL\"" : "\"VIRTUAL\"");
    dumpsys_log(buf,"CURR_FRAME: layerCount:%2d mdpCount:%2d "
                "fbCount:%2d \n", mCurrentFrame.layerCount,
                mCurrentFrame.mdpCount, mCurrentFrame.fbCount);
    dumpsys_log(buf,"needsFBRedraw:%3s pipesUsed:%2d MaxPipesPerMixer: %d \n",
                (mCurrentFrame.needsRedraw? "YES" : "NO"),
                mCurrentFrame.mdpCount, sMaxPipesPerMixer);
    dumpsys_log(buf," --------------------------------------------- \n");
    dumpsys_log(buf," listIdx | cached? | mdpIndex | comptype | Z \n");
    dumpsys_log(buf," --------------------------------------------- \n");
    for(int index = 0; index < mCurrentFrame.layerCount; index++ )
        dumpsys_log(buf," %7d | %7s | %8d | %9s | %2d \n",
                    index,
                    (mCurrentFrame.isFBComposed[index] ? "YES" : "NO"),
                    mCurrentFrame.layerToMDP[index],
                    (mCurrentFrame.isFBComposed[index] ?
                    (mCurrentFrame.drop[index] ? "DROP" :
                    (mCurrentFrame.needsRedraw ? "GLES" : "CACHE")) : "MDP"),
                    (mCurrentFrame.isFBComposed[index] ? mCurrentFrame.fbZ :
        mCurrentFrame.mdpToLayer[mCurrentFrame.layerToMDP[index]].pipeInfo->zOrder));
    dumpsys_log(buf,"\n");
}

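// One-time initialization: reads the persist/debug properties that control MDP
// composition (enable, mixed mode, partial update, pipes per mixer, 4k2k split)
// and sets up the idle invalidator for video-mode panels.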
bool MDPComp::init(hwc_context_t *ctx) {

    if(!ctx) {
        ALOGE("%s: Invalid hwc context!!",__FUNCTION__);
        return false;
    }

    char property[PROPERTY_VALUE_MAX];

    sEnabled = false;
    if((property_get("persist.hwc.mdpcomp.enable", property, NULL) > 0) &&
       (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
        (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
        sEnabled = true;
    }

    sEnableMixedMode = true;
    if((property_get("debug.mdpcomp.mixedmode.disable", property, NULL) > 0) &&
       (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
        (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
        sEnableMixedMode = false;
    }

    if(property_get("debug.mdpcomp.logs", property, NULL) > 0) {
        if(atoi(property) != 0)
            sDebugLogs = true;
    }

    if(property_get("persist.hwc.partialupdate.enable", property, NULL) > 0) {
        if((atoi(property) != 0) && ctx->mMDP.panel == MIPI_CMD_PANEL &&
           qdutils::MDPVersion::getInstance().is8x74v2())
            sEnablePartialFrameUpdate = true;
    }
    ALOGE_IF(isDebug(), "%s: Partial Update applicable?: %d",__FUNCTION__,
             sEnablePartialFrameUpdate);

    sMaxPipesPerMixer = MAX_PIPES_PER_MIXER;
    if(property_get("debug.mdpcomp.maxpermixer", property, "-1") > 0) {
        int val = atoi(property);
        if(val >= 0)
            sMaxPipesPerMixer = min(val, MAX_PIPES_PER_MIXER);
    }

    if(ctx->mMDP.panel != MIPI_CMD_PANEL) {
        // Idle invalidation is not necessary on command mode panels
        long idle_timeout = DEFAULT_IDLE_TIME;
        if(property_get("debug.mdpcomp.idletime", property, NULL) > 0) {
            if(atoi(property) != 0)
                idle_timeout = atoi(property);
        }

        //create Idle Invalidator only when not disabled through property
        if(idle_timeout != -1)
            idleInvalidator = IdleInvalidator::getInstance();

        if(idleInvalidator == NULL) {
            ALOGE("%s: failed to instantiate idleInvalidator object",
                  __FUNCTION__);
        } else {
            idleInvalidator->init(timeout_handler, ctx, idle_timeout);
        }
    }

    if((property_get("debug.mdpcomp.4k2kSplit", property, "0") > 0) &&
       (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
        (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
        sEnable4k2kYUVSplit = true;
    }
    return true;
}

void MDPComp::reset(hwc_context_t *ctx) {
    const int numLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numLayers);
    ctx->mOverlay->clear(mDpy);
    ctx->mLayerRotMap[mDpy]->clear();
}

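// Idle timer callback: flags an idle fallback (so the next prepare cycle can
// hand composition back to the GPU) and asks SurfaceFlinger to redraw.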
void MDPComp::timeout_handler(void *udata) {
    struct hwc_context_t* ctx = (struct hwc_context_t*)(udata);

    if(!ctx) {
        ALOGE("%s: received empty data in timer callback", __FUNCTION__);
        return;
    }

    if(!ctx->proc) {
        ALOGE("%s: HWC proc not registered", __FUNCTION__);
        return;
    }
    sIdleFallBack = true;
    /* Trigger SF to redraw the current frame */
    ctx->proc->invalidate(ctx->proc);
}

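// Publishes the composition decision to SurfaceFlinger: MDP-composed layers
// are marked HWC_OVERLAY (with a clear-FB hint); cached or dropped FB layers
// are also marked HWC_OVERLAY so GLES does not redraw them.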
void MDPComp::setMDPCompLayerFlags(hwc_context_t *ctx,
                                   hwc_display_contents_1_t* list) {
    LayerProp *layerProp = ctx->layerProp[mDpy];

    for(int index = 0; index < ctx->listStats[mDpy].numAppLayers; index++) {
        hwc_layer_1_t* layer = &(list->hwLayers[index]);
        if(!mCurrentFrame.isFBComposed[index]) {
            layerProp[index].mFlags |= HWC_MDPCOMP;
            layer->compositionType = HWC_OVERLAY;
            layer->hints |= HWC_HINT_CLEAR_FB;
        } else {
            /* Drop the layer when it's already present in FB OR when it lies
             * outside the frame's ROI */
            if(!mCurrentFrame.needsRedraw || mCurrentFrame.drop[index]) {
                layer->compositionType = HWC_OVERLAY;
            }
        }
    }
}

void MDPComp::setRedraw(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    mCurrentFrame.needsRedraw = false;
    if(!mCachedFrame.isSameFrame(mCurrentFrame, list) ||
            (list->flags & HWC_GEOMETRY_CHANGED) ||
            isSkipPresent(ctx, mDpy)) {
        mCurrentFrame.needsRedraw = true;
    }
}

MDPComp::FrameInfo::FrameInfo() {
    reset(0);
}

void MDPComp::FrameInfo::reset(const int& numLayers) {
    for(int i = 0 ; i < MAX_PIPES_PER_MIXER && numLayers; i++ ) {
        if(mdpToLayer[i].pipeInfo) {
            delete mdpToLayer[i].pipeInfo;
            mdpToLayer[i].pipeInfo = NULL;
            //We don't own the rotator
            mdpToLayer[i].rot = NULL;
        }
    }

    memset(&mdpToLayer, 0, sizeof(mdpToLayer));
    memset(&layerToMDP, -1, sizeof(layerToMDP));
    memset(&isFBComposed, 1, sizeof(isFBComposed));

    layerCount = numLayers;
    fbCount = numLayers;
    mdpCount = 0;
    needsRedraw = true;
    fbZ = 0;
}

void MDPComp::FrameInfo::map() {
    // populate layer and MDP maps
    int mdpIdx = 0;
    for(int idx = 0; idx < layerCount; idx++) {
        if(!isFBComposed[idx]) {
            mdpToLayer[mdpIdx].listIndex = idx;
            layerToMDP[idx] = mdpIdx++;
        }
    }
}

MDPComp::LayerCache::LayerCache() {
    reset();
}

void MDPComp::LayerCache::reset() {
    memset(&hnd, 0, sizeof(hnd));
    memset(&isFBComposed, true, sizeof(isFBComposed));
    memset(&drop, false, sizeof(drop));
    layerCount = 0;
}

void MDPComp::LayerCache::cacheAll(hwc_display_contents_1_t* list) {
    const int numAppLayers = list->numHwLayers - 1;
    for(int i = 0; i < numAppLayers; i++) {
        hnd[i] = list->hwLayers[i].handle;
    }
}

void MDPComp::LayerCache::updateCounts(const FrameInfo& curFrame) {
    layerCount = curFrame.layerCount;
    memcpy(&isFBComposed, &curFrame.isFBComposed, sizeof(isFBComposed));
    memcpy(&drop, &curFrame.drop, sizeof(drop));
}

bool MDPComp::LayerCache::isSameFrame(const FrameInfo& curFrame,
                                      hwc_display_contents_1_t* list) {
    if(layerCount != curFrame.layerCount)
        return false;
    for(int i = 0; i < curFrame.layerCount; i++) {
        if((curFrame.isFBComposed[i] != isFBComposed[i]) ||
           (curFrame.drop[i] != drop[i])) {
            return false;
        }
        if(curFrame.isFBComposed[i] &&
           (hnd[i] != list->hwLayers[i].handle)){
            return false;
        }
    }
    return true;
}

bool MDPComp::isSupportedForMDPComp(hwc_context_t *ctx, hwc_layer_1_t* layer) {
    private_handle_t *hnd = (private_handle_t *)layer->handle;
    if((not isYuvBuffer(hnd) and has90Transform(layer)) or
        (not isValidDimension(ctx,layer))
        //More conditions here, SKIP, sRGB+Blend etc
        ) {
        return false;
    }
    return true;
}

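// Rejects layers whose crop/destination geometry the MDP cannot handle:
// NULL handles (except color-fill layers), non-integral source crops on
// non-secure buffers, crops smaller than the minimum block size, and
// scaling ratios beyond the MDP's downscale/upscale limits.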
bool MDPComp::isValidDimension(hwc_context_t *ctx, hwc_layer_1_t *layer) {
    const int dpy = HWC_DISPLAY_PRIMARY;
    private_handle_t *hnd = (private_handle_t *)layer->handle;

    if(!hnd) {
        if (layer->flags & HWC_COLOR_FILL) {
            // Color layer
            return true;
        }
        ALOGE("%s: layer handle is NULL", __FUNCTION__);
        return false;
    }

    //XXX: Investigate doing this with pixel phase on MDSS
    if(!isSecureBuffer(hnd) && isNonIntegralSourceCrop(layer->sourceCropf))
        return false;

    int hw_w = ctx->dpyAttr[mDpy].xres;
    int hw_h = ctx->dpyAttr[mDpy].yres;

    hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
    hwc_rect_t dst = layer->displayFrame;
    int crop_w = crop.right - crop.left;
    int crop_h = crop.bottom - crop.top;
    int dst_w = dst.right - dst.left;
    int dst_h = dst.bottom - dst.top;
    float w_scale = ((float)crop_w / (float)dst_w);
    float h_scale = ((float)crop_h / (float)dst_h);

    /* Workaround for MDP HW limitation in DSI command mode panels where
     * FPS will not go beyond 30 if buffers on RGB pipes are of width or height
     * less than 5 pixels
     * There is also a HW limitation in MDP, minimum block size is 2x2
     * Fallback to GPU if height is less than 2.
     */
    if((crop_w < 5)||(crop_h < 5))
        return false;

    if((w_scale > 1.0f) || (h_scale > 1.0f)) {
        const uint32_t downscale =
            qdutils::MDPVersion::getInstance().getMaxMDPDownscale();
        const float w_dscale = w_scale;
        const float h_dscale = h_scale;

        if(ctx->mMDP.version >= qdutils::MDSS_V5) {
            /* Workaround for downscales larger than 4x.
             * Will be removed once decimator block is enabled for MDSS
             */
            if(!qdutils::MDPVersion::getInstance().supportsDecimation()) {
                if(crop_w > MAX_DISPLAY_DIM || w_dscale > downscale ||
                   h_dscale > downscale)
                    return false;
            } else {
                if(w_dscale > 64 || h_dscale > 64)
                    return false;
            }
        } else { //A-family
            if(w_dscale > downscale || h_dscale > downscale)
                return false;
        }
    }

    if((w_scale < 1.0f) || (h_scale < 1.0f)) {
        const uint32_t upscale =
            qdutils::MDPVersion::getInstance().getMaxMDPUpscale();
        const float w_uscale = 1.0f / w_scale;
        const float h_uscale = 1.0f / h_scale;

        if(w_uscale > upscale || h_uscale > upscale)
            return false;
    }

    return true;
}

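// Grabs the next free MDP pipe of the requested type, falling through from
// DMA to RGB to VG so a more capable pipe can stand in when the preferred
// type is exhausted.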
ovutils::eDest MDPComp::getMdpPipe(hwc_context_t *ctx, ePipeType type,
        int mixer) {
    overlay::Overlay& ov = *ctx->mOverlay;
    ovutils::eDest mdp_pipe = ovutils::OV_INVALID;

    switch(type) {
    case MDPCOMP_OV_DMA:
        mdp_pipe = ov.nextPipe(ovutils::OV_MDP_PIPE_DMA, mDpy, mixer);
        if(mdp_pipe != ovutils::OV_INVALID) {
            return mdp_pipe;
        }
    case MDPCOMP_OV_ANY:
    case MDPCOMP_OV_RGB:
        mdp_pipe = ov.nextPipe(ovutils::OV_MDP_PIPE_RGB, mDpy, mixer);
        if(mdp_pipe != ovutils::OV_INVALID) {
            return mdp_pipe;
        }

        if(type == MDPCOMP_OV_RGB) {
            //Requested only for RGB pipe
            break;
        }
    case MDPCOMP_OV_VG:
        return ov.nextPipe(ovutils::OV_MDP_PIPE_VG, mDpy, mixer);
    default:
        ALOGE("%s: Invalid pipe type",__FUNCTION__);
        return ovutils::OV_INVALID;
    };
    return ovutils::OV_INVALID;
}

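// Frame-level gate: skips MDP composition entirely when it is disabled, when
// the frame needs a padding round (8x26 video transition or an explicit
// padding round), or while a secondary display is still being configured.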
bool MDPComp::isFrameDoable(hwc_context_t *ctx) {
    bool ret = true;
    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    if(!isEnabled()) {
        ALOGD_IF(isDebug(),"%s: MDP Comp. not enabled.", __FUNCTION__);
        ret = false;
    } else if(qdutils::MDPVersion::getInstance().is8x26() &&
            ctx->mVideoTransFlag &&
            isSecondaryConnected(ctx)) {
        //1 Padding round to shift pipes across mixers
        ALOGD_IF(isDebug(),"%s: MDP Comp. video transition padding round",
                __FUNCTION__);
        ret = false;
    } else if(isSecondaryConfiguring(ctx)) {
        ALOGD_IF( isDebug(),"%s: External Display connection is pending",
                __FUNCTION__);
        ret = false;
    } else if(ctx->isPaddingRound) {
        ctx->isPaddingRound = false;
        ALOGD_IF(isDebug(), "%s: padding round",__FUNCTION__);
        ret = false;
    }
    return ret;
}

/*
 * 1) Identify layers that are not visible in the updating ROI and drop them
 * from composition.
 * 2) If we have a scaling layer which needs cropping against the generated
 * ROI, reset the ROI to full resolution.
 */
bool MDPComp::validateAndApplyROI(hwc_context_t *ctx,
                               hwc_display_contents_1_t* list, hwc_rect_t roi) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    if(!isValidRect(roi))
        return false;

    hwc_rect_t visibleRect = roi;

    for(int i = numAppLayers - 1; i >= 0; i--){

        if(!isValidRect(visibleRect)) {
            mCurrentFrame.drop[i] = true;
            mCurrentFrame.dropCount++;
            continue;
        }

        const hwc_layer_1_t* layer = &list->hwLayers[i];

        hwc_rect_t dstRect = layer->displayFrame;
        hwc_rect_t srcRect = integerizeSourceCrop(layer->sourceCropf);
        int transform = layer->transform;

        hwc_rect_t res = getIntersection(visibleRect, dstRect);

        int res_w = res.right - res.left;
        int res_h = res.bottom - res.top;
        int dst_w = dstRect.right - dstRect.left;
        int dst_h = dstRect.bottom - dstRect.top;

        if(!isValidRect(res)) {
            mCurrentFrame.drop[i] = true;
            mCurrentFrame.dropCount++;
        } else {
            /* Reset frame ROI when any layer which needs scaling also needs ROI
             * cropping */
            if((res_w != dst_w || res_h != dst_h) && needsScaling (layer)) {
                ALOGI("%s: Resetting ROI due to scaling", __FUNCTION__);
                memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
                mCurrentFrame.dropCount = 0;
                return false;
            }

            /* deduct any opaque region from visibleRect */
            if (layer->blending == HWC_BLENDING_NONE)
                visibleRect = deductRect(visibleRect, res);
        }
    }
    return true;
}

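// Computes the frame's dirty region (union of the display frames of updated
// or YUV layers) for partial frame update on the primary display; falls back
// to the full screen when the ROI cannot be applied.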
void MDPComp::generateROI(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    if(!sEnablePartialFrameUpdate) {
        return;
    }

    if(mDpy || isDisplaySplit(ctx, mDpy)){
        ALOGE_IF(isDebug(), "%s: ROI not supported for"
                 "the (1) external / virtual display's (2) dual DSI displays",
                 __FUNCTION__);
        return;
    }

    if(isSkipPresent(ctx, mDpy))
        return;

    if(list->flags & HWC_GEOMETRY_CHANGED)
        return;

    struct hwc_rect roi = (struct hwc_rect){0, 0, 0, 0};
    for(int index = 0; index < numAppLayers; index++ ) {
        if ((mCachedFrame.hnd[index] != list->hwLayers[index].handle) ||
            isYuvBuffer((private_handle_t *)list->hwLayers[index].handle)) {
            hwc_rect_t dstRect = list->hwLayers[index].displayFrame;
            hwc_rect_t srcRect = integerizeSourceCrop(
                                        list->hwLayers[index].sourceCropf);
            int transform = list->hwLayers[index].transform;

            /* Intersect against display boundaries */
            roi = getUnion(roi, dstRect);
        }
    }

    if(!validateAndApplyROI(ctx, list, roi)){
        roi = (struct hwc_rect) {0, 0,
                (int)ctx->dpyAttr[mDpy].xres, (int)ctx->dpyAttr[mDpy].yres};
    }

    ctx->listStats[mDpy].roi.x = roi.left;
    ctx->listStats[mDpy].roi.y = roi.top;
    ctx->listStats[mDpy].roi.w = roi.right - roi.left;
    ctx->listStats[mDpy].roi.h = roi.bottom - roi.top;

    ALOGD_IF(isDebug(),"%s: generated ROI: [%d, %d, %d, %d]", __FUNCTION__,
            roi.left, roi.top, roi.right, roi.bottom);
}

/* Checks for conditions where all the layers marked for MDP comp cannot be
 * bypassed. On such conditions we try to bypass at least the YUV layers */
bool MDPComp::tryFullFrame(hwc_context_t *ctx,
                           hwc_display_contents_1_t* list){

    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    int priDispW = ctx->dpyAttr[HWC_DISPLAY_PRIMARY].xres;

    if(sIdleFallBack && !ctx->listStats[mDpy].secureUI) {
        ALOGD_IF(isDebug(), "%s: Idle fallback dpy %d",__FUNCTION__, mDpy);
        return false;
    }

    if(isSkipPresent(ctx, mDpy)) {
        ALOGD_IF(isDebug(),"%s: SKIP present: %d",
                __FUNCTION__,
                isSkipPresent(ctx, mDpy));
        return false;
    }

    if(mDpy > HWC_DISPLAY_PRIMARY && (priDispW > MAX_DISPLAY_DIM) &&
        (ctx->dpyAttr[mDpy].xres < MAX_DISPLAY_DIM)) {
        // Disable MDP comp on the secondary when the primary is a high-res
        // panel and the secondary is a normal 1080p: in such a usecase MDP
        // comp on the secondary uses decimation for the downscale, and there
        // would be a quality mismatch whenever it falls back to GPU comp.
        ALOGD_IF(isDebug(), "%s: Disable MDP Composition for Secondary Disp",
                 __FUNCTION__);
        return false;
    }

    // check for action safe flag and downscale mode which requires scaling.
    if(ctx->dpyAttr[mDpy].mActionSafePresent
            || ctx->dpyAttr[mDpy].mDownScaleMode) {
        ALOGD_IF(isDebug(), "%s: Scaling needed for this frame",__FUNCTION__);
        return false;
    }

    for(int i = 0; i < numAppLayers; ++i) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        private_handle_t *hnd = (private_handle_t *)layer->handle;

        if(isYuvBuffer(hnd) && has90Transform(layer)) {
            if(!canUseRotator(ctx, mDpy)) {
                ALOGD_IF(isDebug(), "%s: Can't use rotator for dpy %d",
                        __FUNCTION__, mDpy);
                return false;
            }
        }

        //For 8x26 with panel width > 1k, fail MDP comp if an RGB layer needs
        //HFLIP. May not be needed if Gfx pre-rotation can handle all flips &
        //rotations.
        if(qdutils::MDPVersion::getInstance().is8x26() &&
                (ctx->dpyAttr[mDpy].xres > 1024) &&
                (layer->transform & HWC_TRANSFORM_FLIP_H) &&
                (!isYuvBuffer(hnd)))
            return false;
    }

    if(ctx->mAD->isDoable()) {
        return false;
    }

    //If all above hard conditions are met we can do full or partial MDP comp.
    bool ret = false;
    if(fullMDPComp(ctx, list)) {
        ret = true;
    } else if(partialMDPComp(ctx, list)) {
        ret = true;
    }

    return ret;
}

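// Attempts to place every app layer on an MDP pipe (no GPU/FB layer at all),
// subject to per-layer support checks and the secondary pipe budget.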
bool MDPComp::fullMDPComp(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
    //Will benefit presentation / secondary-only layer.
    if((mDpy > HWC_DISPLAY_PRIMARY) &&
            (list->numHwLayers - 1) > MAX_SEC_LAYERS) {
        ALOGD_IF(isDebug(), "%s: Exceeds max secondary pipes",__FUNCTION__);
        return false;
    }

    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    for(int i = 0; i < numAppLayers; i++) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        if(not isSupportedForMDPComp(ctx, layer)) {
            ALOGD_IF(isDebug(), "%s: Unsupported layer in list",__FUNCTION__);
            return false;
        }

        //For 8x26, if there is only one layer which needs scale for secondary
        //while no scale for primary display, DMA pipe is occupied by primary.
        //If need to fall back to GLES composition, virtual display lacks DMA
        //pipe and error is reported.
        if(qdutils::MDPVersion::getInstance().is8x26() &&
                mDpy >= HWC_DISPLAY_EXTERNAL &&
                qhwc::needsScaling(layer))
            return false;
    }

    mCurrentFrame.fbCount = 0;
    mCurrentFrame.fbZ = -1;
    memcpy(&mCurrentFrame.isFBComposed, &mCurrentFrame.drop,
            sizeof(mCurrentFrame.isFBComposed));
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount -
        mCurrentFrame.dropCount;

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

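// Mixed-mode composition: part of the layer list goes to MDP pipes and the
// rest is batched into the GPU-composed framebuffer. Tries the cache-based
// and load-based strategies in an order that depends on whether the geometry
// changed this frame.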
bool MDPComp::partialMDPComp(hwc_context_t *ctx, hwc_display_contents_1_t* list)
{
    if(!sEnableMixedMode) {
        //Mixed mode is disabled. No need to even try caching.
        return false;
    }

    bool ret = false;
    if(list->flags & HWC_GEOMETRY_CHANGED) { //Try load based first
        ret = loadBasedCompPreferGPU(ctx, list) or
                loadBasedCompPreferMDP(ctx, list) or
                cacheBasedComp(ctx, list);
    } else {
        ret = cacheBasedComp(ctx, list) or
                loadBasedCompPreferGPU(ctx, list) or
                loadBasedCompPreferMDP(ctx, list);
    }

    return ret;
}

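// Cache-based mixed mode: layers whose buffers are unchanged since the last
// frame stay in the (cached) framebuffer, updating layers get MDP pipes.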
bool MDPComp::cacheBasedComp(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numAppLayers);
    updateLayerCache(ctx, list);

    //If an MDP marked layer is unsupported cannot do partial MDP Comp
    for(int i = 0; i < numAppLayers; i++) {
        if(!mCurrentFrame.isFBComposed[i]) {
            hwc_layer_1_t* layer = &list->hwLayers[i];
            if(not isSupportedForMDPComp(ctx, layer)) {
                ALOGD_IF(isDebug(), "%s: Unsupported layer in list",
                        __FUNCTION__);
                reset(ctx);
                return false;
            }
        }
    }

    updateYUV(ctx, list, false /*secure only*/);
    bool ret = markLayersForCaching(ctx, list); //sets up fbZ also
    if(!ret) {
        ALOGD_IF(isDebug(),"%s: batching failed, dpy %d",__FUNCTION__, mDpy);
        reset(ctx);
        return false;
    }

    int mdpCount = mCurrentFrame.mdpCount;

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    //Will benefit cases where a video has non-updating background.
    if((mDpy > HWC_DISPLAY_PRIMARY) and
            (mdpCount > MAX_SEC_LAYERS)) {
        ALOGD_IF(isDebug(), "%s: Exceeds max secondary pipes",__FUNCTION__);
        reset(ctx);
        return false;
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

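// Load-based mixed mode biased toward the GPU: picks the contiguous batch of
// layers with the lowest pixel count for GPU/FB composition, so MDP fetches
// as few pixels as possible with the pipes it has.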
bool MDPComp::loadBasedCompPreferGPU(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    if(not isLoadBasedCompDoable(ctx, list)) {
        return false;
    }

    int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numAppLayers);

    int stagesForMDP = min(sMaxPipesPerMixer, ctx->mOverlay->availablePipes(
            mDpy, Overlay::MIXER_DEFAULT));
    //If MDP has X possible stages, it can take X layers.
    const int batchSize = (numAppLayers - mCurrentFrame.dropCount) -
                (stagesForMDP - 1); //1 for FB

    if(batchSize <= 0) {
        ALOGD_IF(isDebug(), "%s: Not attempting", __FUNCTION__);
        return false;
    }

    int minBatchStart = -1;
    int minBatchEnd = -1;
    size_t minBatchPixelCount = SIZE_MAX;

    /* Iterate through the layer list to find a contiguous batch of batchSize
     * non-dropped layers with the lowest pixel count */
    for(int i = 0; i <= (numAppLayers - batchSize); i++) {
        if(mCurrentFrame.drop[i])
            continue;

        int batchCount = batchSize;
        uint32_t batchPixelCount = 0;
        int j = i;
        for(; j < numAppLayers && batchCount; j++){
            if(!mCurrentFrame.drop[j]) {
                hwc_layer_1_t* layer = &list->hwLayers[j];
                hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
                hwc_rect_t dst = layer->displayFrame;

                /* If we have a valid ROI, count pixels only for the MDP fetched
                 * region of the buffer */
                if((ctx->listStats[mDpy].roi.w != ctx->dpyAttr[mDpy].xres) ||
                        (ctx->listStats[mDpy].roi.h != ctx->dpyAttr[mDpy].yres)) {
                    hwc_rect_t roi;
                    roi.left = ctx->listStats[mDpy].roi.x;
                    roi.top = ctx->listStats[mDpy].roi.y;
                    roi.right = roi.left + ctx->listStats[mDpy].roi.w;
                    roi.bottom = roi.top + ctx->listStats[mDpy].roi.h;

                    /* valid ROI means no scaling layer is composed. So check
                     * only intersection to find actual fetched pixels */
                    crop = getIntersection(roi, dst);
                }

                batchPixelCount += (crop.right - crop.left) *
                    (crop.bottom - crop.top);
                batchCount--;
            }
        }

        /* we don't want to program any batch smaller than batchSize */
        if(!batchCount && (batchPixelCount < minBatchPixelCount)) {
            minBatchPixelCount = batchPixelCount;
            minBatchStart = i;
            minBatchEnd = j-1;
        }
    }

    if(minBatchStart < 0) {
        ALOGD_IF(isDebug(), "%s: No batch found batchSize %d numAppLayers %d",
                __FUNCTION__, batchSize, numAppLayers);
        return false;
    }

    /* non-dropped layers falling outside the selected batch will be marked for
     * MDP */
    for(int i = 0; i < numAppLayers; i++) {
        if((i < minBatchStart || i > minBatchEnd) && !mCurrentFrame.drop[i] ) {
            hwc_layer_1_t* layer = &list->hwLayers[i];
            if(not isSupportedForMDPComp(ctx, layer)) {
                ALOGD_IF(isDebug(), "%s: MDP unsupported layer found at %d",
                        __FUNCTION__, i);
                reset(ctx);
                return false;
            }
            mCurrentFrame.isFBComposed[i] = false;
        }
    }

    mCurrentFrame.fbZ = minBatchStart;
    mCurrentFrame.fbCount = batchSize;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount -
            mCurrentFrame.dropCount;

    ALOGD_IF(isDebug(), "%s: fbZ %d batchSize %d fbStart: %d fbEnd: %d",
                __FUNCTION__, mCurrentFrame.fbZ, batchSize, minBatchStart,
                minBatchEnd);

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

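// Load-based mixed mode biased toward MDP: estimates how many full-screen
// layers the remaining bandwidth allows and batches only the topmost layers
// into the framebuffer, leaving the rest on MDP pipes.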
bool MDPComp::loadBasedCompPreferMDP(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    if(not isLoadBasedCompDoable(ctx, list)) {
        return false;
    }

    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numAppLayers);

    //Full screen is from ib perspective, not actual full screen
    const int bpp = 4;
    double panelRefRate =
                1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;

    double bwLeft = sMaxBw - sBwClaimed;

    const int fullScreenLayers = bwLeft * 1000000000 / (ctx->dpyAttr[mDpy].xres
            * ctx->dpyAttr[mDpy].yres * bpp * panelRefRate);

    const int fbBatchSize = (numAppLayers - mCurrentFrame.dropCount)
            - (fullScreenLayers - 1);

    //If batch size is not at least 2, we aren't really preferring MDP, since
    //only 1 layer going to GPU could actually translate into an entire FB
    //needed to be fetched by MDP, thus needing more b/w rather than less.
    if(fbBatchSize < 2 || fbBatchSize > numAppLayers) {
        ALOGD_IF(isDebug(), "%s: Not attempting", __FUNCTION__);
        return false;
    }

    //Find top fbBatchSize non-dropped layers to get your batch
    int fbStart = -1, fbEnd = -1, batchCount = fbBatchSize;
    for(int i = numAppLayers - 1; i >= 0; i--) {
        if(mCurrentFrame.drop[i])
            continue;

        if(fbEnd < 0)
            fbEnd = i;

        if(!(--batchCount)) {
            fbStart = i;
            break;
        }
    }

    //Bottom layers constitute MDP batch
    for(int i = 0; i < fbStart; i++) {
        if((i < fbStart || i > fbEnd) && !mCurrentFrame.drop[i] ) {
            hwc_layer_1_t* layer = &list->hwLayers[i];
            if(not isSupportedForMDPComp(ctx, layer)) {
                ALOGD_IF(isDebug(), "%s: MDP unsupported layer found at %d",
                        __FUNCTION__, i);
                reset(ctx);
                return false;
            }
            mCurrentFrame.isFBComposed[i] = false;
        }
    }

    mCurrentFrame.fbZ = fbStart;
    mCurrentFrame.fbCount = fbBatchSize;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount
            - mCurrentFrame.dropCount;

    ALOGD_IF(isDebug(), "%s: FB Z %d, app layers %d, non-dropped layers: %d, "
            "MDP Batch Size %d",__FUNCTION__, mCurrentFrame.fbZ, numAppLayers,
            numAppLayers - mCurrentFrame.dropCount, mCurrentFrame.mdpCount);

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

bool MDPComp::isLoadBasedCompDoable(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    if(mDpy or isSecurePresent(ctx, mDpy) or
            isYuvPresent(ctx, mDpy)) {
        return false;
    }
    return true;
}

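// Fallback path used when full/partial MDP comp is not doable: composes only
// the video (YUV) layers via MDP, trying all video layers first and then only
// the secure ones.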
bool MDPComp::tryVideoOnly(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    const bool secureOnly = true;
    return videoOnlyComp(ctx, list, not secureOnly) or
            videoOnlyComp(ctx, list, secureOnly);
}

bool MDPComp::videoOnlyComp(hwc_context_t *ctx,
        hwc_display_contents_1_t* list, bool secureOnly) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    mCurrentFrame.reset(numAppLayers);
    updateYUV(ctx, list, secureOnly);
    int mdpCount = mCurrentFrame.mdpCount;

    if(!isYuvPresent(ctx, mDpy) or (mdpCount == 0)) {
        reset(ctx);
        return false;
    }

    /* Bail out if we are processing only secured video layers
     * and we don't have any */
    if(!isSecurePresent(ctx, mDpy) && secureOnly){
        reset(ctx);
        return false;
    }

    if(mCurrentFrame.fbCount)
        mCurrentFrame.fbZ = mCurrentFrame.mdpCount;

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

/* Checks for conditions where YUV layers cannot be bypassed */
bool MDPComp::isYUVDoable(hwc_context_t* ctx, hwc_layer_1_t* layer) {
    if(isSkipLayer(layer)) {
        ALOGD_IF(isDebug(), "%s: Video marked SKIP dpy %d", __FUNCTION__, mDpy);
        return false;
    }

    if(layer->transform & HWC_TRANSFORM_ROT_90 && !canUseRotator(ctx,mDpy)) {
        ALOGD_IF(isDebug(), "%s: no free DMA pipe",__FUNCTION__);
        return false;
    }

    if(isSecuring(ctx, layer)) {
        ALOGD_IF(isDebug(), "%s: MDP securing is active", __FUNCTION__);
        return false;
    }

    if(!isValidDimension(ctx, layer)) {
        ALOGD_IF(isDebug(), "%s: Buffer is of invalid width",
            __FUNCTION__);
        return false;
    }

    if(layer->planeAlpha < 0xFF) {
        ALOGD_IF(isDebug(), "%s: Cannot handle YUV layer with plane alpha\
                 in video only mode",
                 __FUNCTION__);
        return false;
    }

    return true;
}

/* Starts at fromIndex and checks each non-updating layer in the batch for
 * overlap with any updating layer above it in z-order, till the end of the
 * batch. Returns false on the first intersection found, true otherwise. */
bool MDPComp::canPushBatchToTop(const hwc_display_contents_1_t* list,
        int fromIndex, int toIndex) {
    for(int i = fromIndex; i < toIndex; i++) {
        if(mCurrentFrame.isFBComposed[i] && !mCurrentFrame.drop[i]) {
            if(intersectingUpdatingLayers(list, i+1, toIndex, i)) {
                return false;
            }
        }
    }
    return true;
}

/* Checks if the given layer at targetLayerIndex has any
 * intersection with the updating layers between
 * fromIndex and toIndex. Returns true if it finds an intersection */
bool MDPComp::intersectingUpdatingLayers(const hwc_display_contents_1_t* list,
        int fromIndex, int toIndex, int targetLayerIndex) {
    for(int i = fromIndex; i <= toIndex; i++) {
        if(!mCurrentFrame.isFBComposed[i]) {
            if(areLayersIntersecting(&list->hwLayers[i],
                        &list->hwLayers[targetLayerIndex])) {
                return true;
            }
        }
    }
    return false;
}

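// Scans the layer list for the largest contiguous batch of cached
// (non-updating, non-dropped) layers and returns the z-order at which the
// framebuffer holding that batch should be staged.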
int MDPComp::getBatch(hwc_display_contents_1_t* list,
        int& maxBatchStart, int& maxBatchEnd,
        int& maxBatchCount) {
    int i = 0;
    int fbZOrder =-1;
    int droppedLayerCt = 0;
    while (i < mCurrentFrame.layerCount) {
        int batchCount = 0;
        int batchStart = i;
        int batchEnd = i;
        /* Adjust batch Z order with the dropped layers so far */
        int fbZ = batchStart - droppedLayerCt;
        int firstZReverseIndex = -1;
        int updatingLayersAbove = 0;//Updating layer count in middle of batch
        while(i < mCurrentFrame.layerCount) {
            if(!mCurrentFrame.isFBComposed[i]) {
                if(!batchCount) {
                    i++;
                    break;
                }
                updatingLayersAbove++;
                i++;
                continue;
            } else {
                if(mCurrentFrame.drop[i]) {
                    i++;
                    droppedLayerCt++;
                    continue;
                } else if(updatingLayersAbove <= 0) {
                    batchCount++;
                    batchEnd = i;
                    i++;
                    continue;
                } else { //Layer is FBComposed, not a drop & updatingLayer > 0

                    // We have a valid updating layer already. If layer-i does
                    // not overlap with any updating layer between batch-start
                    // and i, then we can add layer i to the batch.
                    if(!intersectingUpdatingLayers(list, batchStart, i-1, i)) {
                        batchCount++;
                        batchEnd = i;
                        i++;
                        continue;
                    } else if(canPushBatchToTop(list, batchStart, i)) {
                        //If all the non-updating layers within this batch
                        //do not intersect with the updating layers above in
                        //z-order, then we can safely move the batch to a
                        //higher z-order. Increment fbZ as it moves up.
                        if( firstZReverseIndex < 0) {
                            firstZReverseIndex = i;
                        }
                        batchCount++;
                        batchEnd = i;
                        fbZ += updatingLayersAbove;
                        i++;
                        updatingLayersAbove = 0;
                        continue;
                    } else {
                        //Both failed. Start the loop again from here.
                        if(firstZReverseIndex >= 0) {
                            i = firstZReverseIndex;
                        }
                        break;
                    }
                }
            }
        }
        if(batchCount > maxBatchCount) {
            maxBatchCount = batchCount;
            maxBatchStart = batchStart;
            maxBatchEnd = batchEnd;
            fbZOrder = fbZ;
        }
    }
    return fbZOrder;
}

bool MDPComp::markLayersForCaching(hwc_context_t* ctx,
        hwc_display_contents_1_t* list) {
    /* Idea is to keep as many non-updating(cached) layers in FB and
     * send rest of them through MDP. This is done in 2 steps.
     *   1. Find the maximum contiguous batch of non-updating layers.
     *   2. See if we can improve this batch size for caching by adding
     *      opaque layers around the batch, if they don't have
     *      any overlapping with the updating layers in between.
     * NEVER mark an updating layer for caching.
     * But cached ones can be marked for MDP */

    int maxBatchStart = -1;
    int maxBatchEnd = -1;
    int maxBatchCount = 0;
    int fbZ = -1;

    /* All or Nothing is cached. No batching needed */
    if(!mCurrentFrame.fbCount) {
        mCurrentFrame.fbZ = -1;
        return true;
    }
    if(!mCurrentFrame.mdpCount) {
        mCurrentFrame.fbZ = 0;
        return true;
    }

    fbZ = getBatch(list, maxBatchStart, maxBatchEnd, maxBatchCount);

    /* reset rest of the layers lying inside ROI for MDP comp */
    for(int i = 0; i < mCurrentFrame.layerCount; i++) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        if((i < maxBatchStart || i > maxBatchEnd) &&
                mCurrentFrame.isFBComposed[i]){
            if(!mCurrentFrame.drop[i]){
                //If an unsupported layer is being attempted to
                //be pulled out we should fail
                if(not isSupportedForMDPComp(ctx, layer)) {
                    return false;
                }
                mCurrentFrame.isFBComposed[i] = false;
            }
        }
    }

    // update the frame data
    mCurrentFrame.fbZ = fbZ;
    mCurrentFrame.fbCount = maxBatchCount;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount -
            mCurrentFrame.fbCount - mCurrentFrame.dropCount;

    ALOGD_IF(isDebug(),"%s: cached count: %d",__FUNCTION__,
            mCurrentFrame.fbCount);

    return true;
}

void MDPComp::updateLayerCache(hwc_context_t* ctx,
        hwc_display_contents_1_t* list) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    int fbCount = 0;

    for(int i = 0; i < numAppLayers; i++) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        if (mCachedFrame.hnd[i] == list->hwLayers[i].handle) {
            if(!mCurrentFrame.drop[i])
                fbCount++;
            mCurrentFrame.isFBComposed[i] = true;
        } else {
            mCurrentFrame.isFBComposed[i] = false;
        }
    }

    mCurrentFrame.fbCount = fbCount;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount
            - mCurrentFrame.dropCount;

    ALOGD_IF(isDebug(),"%s: MDP count: %d FB count %d drop count: %d"
            ,__FUNCTION__, mCurrentFrame.mdpCount, mCurrentFrame.fbCount,
            mCurrentFrame.dropCount);
}

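// Re-evaluates the YUV layers: un-doable ones are pushed back to the FB, while
// doable ones (optionally restricted to secure buffers) are pulled onto MDP.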
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001196void MDPComp::updateYUV(hwc_context_t* ctx, hwc_display_contents_1_t* list,
1197 bool secureOnly) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001198 int nYuvCount = ctx->listStats[mDpy].yuvCount;
1199 for(int index = 0;index < nYuvCount; index++){
1200 int nYuvIndex = ctx->listStats[mDpy].yuvIndices[index];
1201 hwc_layer_1_t* layer = &list->hwLayers[nYuvIndex];
1202
1203 if(!isYUVDoable(ctx, layer)) {
1204 if(!mCurrentFrame.isFBComposed[nYuvIndex]) {
1205 mCurrentFrame.isFBComposed[nYuvIndex] = true;
1206 mCurrentFrame.fbCount++;
1207 }
1208 } else {
1209 if(mCurrentFrame.isFBComposed[nYuvIndex]) {
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001210 private_handle_t *hnd = (private_handle_t *)layer->handle;
1211 if(!secureOnly || isSecureBuffer(hnd)) {
1212 mCurrentFrame.isFBComposed[nYuvIndex] = false;
1213 mCurrentFrame.fbCount--;
1214 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001215 }
1216 }
1217 }
Saurabh Shahaa236822013-04-24 18:07:26 -07001218
1219 mCurrentFrame.mdpCount = mCurrentFrame.layerCount -
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001220 mCurrentFrame.fbCount - mCurrentFrame.dropCount;
1221 ALOGD_IF(isDebug(),"%s: fb count: %d",__FUNCTION__,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001222 mCurrentFrame.fbCount);
1223}
1224
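/*
 * Runs once a composition strategy has been picked: checks pipe and
 * bandwidth resources and hardware limitations, configures the framebuffer
 * target (when fbZ is valid), allocates MDP pipes and assigns z-orders,
 * giving 4k2k YUV layers an extra z-order when source split is enabled.
 */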
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001225bool MDPComp::postHeuristicsHandling(hwc_context_t *ctx,
1226 hwc_display_contents_1_t* list) {
1227
1228 //Capability checks
1229 if(!resourceCheck(ctx, list)) {
1230 ALOGD_IF(isDebug(), "%s: resource check failed", __FUNCTION__);
1231 return false;
1232 }
1233
1234 //Limitations checks
1235 if(!hwLimitationsCheck(ctx, list)) {
1236 ALOGD_IF(isDebug(), "%s: HW limitations",__FUNCTION__);
1237 return false;
1238 }
1239
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001240 //Configure framebuffer first if applicable
1241 if(mCurrentFrame.fbZ >= 0) {
1242 if(!ctx->mFBUpdate[mDpy]->prepare(ctx, list, mCurrentFrame.fbZ)) {
1243 ALOGD_IF(isDebug(), "%s configure framebuffer failed",
1244 __FUNCTION__);
1245 return false;
1246 }
1247 }
1248
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001249 mCurrentFrame.map();
1250
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001251 if(!allocLayerPipes(ctx, list)) {
1252 ALOGD_IF(isDebug(), "%s: Unable to allocate MDP pipes", __FUNCTION__);
Saurabh Shahaa236822013-04-24 18:07:26 -07001253 return false;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001254 }
1255
1256 for (int index = 0, mdpNextZOrder = 0; index < mCurrentFrame.layerCount;
Saurabh Shahaa236822013-04-24 18:07:26 -07001257 index++) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001258 if(!mCurrentFrame.isFBComposed[index]) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001259 int mdpIndex = mCurrentFrame.layerToMDP[index];
1260 hwc_layer_1_t* layer = &list->hwLayers[index];
1261
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301262 //Leave fbZ for framebuffer. CACHE/GLES layers go here.
1263 if(mdpNextZOrder == mCurrentFrame.fbZ) {
1264 mdpNextZOrder++;
1265 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001266 MdpPipeInfo* cur_pipe = mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
1267 cur_pipe->zOrder = mdpNextZOrder++;
1268
radhakrishnac9a67412013-09-25 17:40:42 +05301269 private_handle_t *hnd = (private_handle_t *)layer->handle;
1270 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1271 if(configure4k2kYuv(ctx, layer,
1272 mCurrentFrame.mdpToLayer[mdpIndex])
1273 != 0 ){
1274 ALOGD_IF(isDebug(), "%s: Failed to configure split pipes \
1275 for layer %d",__FUNCTION__, index);
1276 return false;
1277 }
1278 else{
1279 mdpNextZOrder++;
1280 }
1281 continue;
1282 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001283 if(configure(ctx, layer, mCurrentFrame.mdpToLayer[mdpIndex]) != 0 ){
1284 ALOGD_IF(isDebug(), "%s: Failed to configure overlay for \
radhakrishnac9a67412013-09-25 17:40:42 +05301285 layer %d",__FUNCTION__, index);
Saurabh Shahaa236822013-04-24 18:07:26 -07001286 return false;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001287 }
Saurabh Shahaa236822013-04-24 18:07:26 -07001288 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001289 }
1290
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001291 setRedraw(ctx, list);
Saurabh Shahaa236822013-04-24 18:07:26 -07001292 return true;
1293}
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001294
Saurabh Shah173f4242013-11-20 09:50:12 -08001295bool MDPComp::resourceCheck(hwc_context_t *ctx,
1296 hwc_display_contents_1_t *list) {
1297 const bool fbUsed = mCurrentFrame.fbCount;
1298 if(mCurrentFrame.mdpCount > sMaxPipesPerMixer - fbUsed) {
1299 ALOGD_IF(isDebug(), "%s: Exceeds MAX_PIPES_PER_MIXER",__FUNCTION__);
1300 return false;
1301 }
1302
1303 if(!arePipesAvailable(ctx, list)) {
1304 return false;
1305 }
1306
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001307 double size = calcMDPBytesRead(ctx, list);
Saurabh Shah173f4242013-11-20 09:50:12 -08001308 if(!bandwidthCheck(ctx, size)) {
1309 ALOGD_IF(isDebug(), "%s: Exceeds bandwidth",__FUNCTION__);
1310 return false;
1311 }
1312
1313 return true;
1314}
1315
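/*
 * Rough per-frame estimate, in GB, of the data MDP reads for this frame:
 * for every MDP-composed layer, bytes-per-pixel times the source crop area,
 * scaled by the vertical downscale factor (panel yres over destination
 * height). The framebuffer, when used, is counted as one full RGBA8888
 * frame at its destination size.
 */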
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001316double MDPComp::calcMDPBytesRead(hwc_context_t *ctx,
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001317 hwc_display_contents_1_t* list) {
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001318 double size = 0;
1319 const double GIG = 1000000000.0;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001320
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001321 //Skip for targets where no device tree value for bw is supplied
1322 if(sMaxBw <= 0.0) {
1323 return 0.0;
1324 }
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001325
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001326 for (uint32_t i = 0; i < list->numHwLayers - 1; i++) {
1327 if(!mCurrentFrame.isFBComposed[i]) {
1328 hwc_layer_1_t* layer = &list->hwLayers[i];
1329 private_handle_t *hnd = (private_handle_t *)layer->handle;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001330 if (hnd) {
Saurabh Shah62e1d732013-09-17 10:44:05 -07001331 hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
Saurabh Shah90789162013-09-16 10:29:20 -07001332 hwc_rect_t dst = layer->displayFrame;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001333 float bpp = ((float)hnd->size) / (hnd->width * hnd->height);
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001334 size += (bpp * (crop.right - crop.left) *
1335 (crop.bottom - crop.top) *
1336 ctx->dpyAttr[mDpy].yres / (dst.bottom - dst.top)) /
1337 GIG;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001338 }
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001339 }
1340 }
1341
1342 if(mCurrentFrame.fbCount) {
1343 hwc_layer_1_t* layer = &list->hwLayers[list->numHwLayers - 1];
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001344 int tempw, temph;
1345 size += (getBufferSizeAndDimensions(
1346 layer->displayFrame.right - layer->displayFrame.left,
1347 layer->displayFrame.bottom - layer->displayFrame.top,
1348 HAL_PIXEL_FORMAT_RGBA_8888,
1349 tempw, temph)) / GIG;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001350 }
1351
1352 return size;
1353}
1354
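/*
 * Admission check against the bus bandwidth budget: the bytes read per
 * frame multiplied by the panel refresh rate (derived from the vsync
 * period) must fit in whatever is left of sMaxBw after sBwClaimed.
 * For illustration only: a full-screen 1080p RGBA8888 layer refreshed at
 * 60 fps reads about 1920 * 1080 * 4 B * 60 ~= 0.5 GB/s.
 */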
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001355bool MDPComp::bandwidthCheck(hwc_context_t *ctx, const double& size) {
1356 //Skip for targets where no device tree value for bw is supplied
1357 if(sMaxBw <= 0.0) {
1358 return true;
1359 }
1360
1361 double panelRefRate =
1362 1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;
1363 if((size * panelRefRate) > (sMaxBw - sBwClaimed)) {
1364 return false;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001365 }
1366 return true;
1367}
1368
Prabhanjan Kandula21918db2013-11-26 15:51:58 +05301369bool MDPComp::hwLimitationsCheck(hwc_context_t* ctx,
1370 hwc_display_contents_1_t* list) {
1371
1372 //A-family hw limitation:
1373    //If a layer needs alpha scaling, MDP cannot support it.
1374 if(ctx->mMDP.version < qdutils::MDSS_V5) {
1375 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1376 if(!mCurrentFrame.isFBComposed[i] &&
1377 isAlphaScaled( &list->hwLayers[i])) {
1378 ALOGD_IF(isDebug(), "%s:frame needs alphaScaling",__FUNCTION__);
1379 return false;
1380 }
1381 }
1382 }
1383
1384 // On 8x26 & 8974 hw, we have a limitation of downscaling+blending.
1385    //If multiple layers require downscaling and they also overlap,
1386    //fall back to GPU since MDSS cannot handle it.
1387 if(qdutils::MDPVersion::getInstance().is8x74v2() ||
1388 qdutils::MDPVersion::getInstance().is8x26()) {
1389 for(int i = 0; i < mCurrentFrame.layerCount-1; ++i) {
1390 hwc_layer_1_t* botLayer = &list->hwLayers[i];
1391 if(!mCurrentFrame.isFBComposed[i] &&
1392 isDownscaleRequired(botLayer)) {
1393 //if layer-i is marked for MDP and needs downscaling
1394 //check if any MDP layer on top of i & overlaps with layer-i
1395 for(int j = i+1; j < mCurrentFrame.layerCount; ++j) {
1396 hwc_layer_1_t* topLayer = &list->hwLayers[j];
1397 if(!mCurrentFrame.isFBComposed[j] &&
1398 isDownscaleRequired(topLayer)) {
1399 hwc_rect_t r = getIntersection(botLayer->displayFrame,
1400 topLayer->displayFrame);
1401 if(isValidRect(r))
1402 return false;
1403 }
1404 }
1405 }
1406 }
1407 }
1408 return true;
1409}
1410
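/*
 * Top-level per-frame strategy selection for this display. Returns 0 when
 * an MDP composition (full frame or video only) has been configured, and
 * -1 when the frame should fall back to GPU composition.
 */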
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001411int MDPComp::prepare(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001412 int ret = 0;
Saurabh Shahaa236822013-04-24 18:07:26 -07001413 const int numLayers = ctx->listStats[mDpy].numAppLayers;
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001414 MDPVersion& mdpVersion = qdutils::MDPVersion::getInstance();
Ramkumar Radhakrishnanc5893f12013-06-06 19:43:53 -07001415
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001416    //If the number of app layers exceeds MAX_NUM_APP_LAYERS, fall back to GPU
1417    //and do not cache the information for the next draw cycle.
1418 if(numLayers > MAX_NUM_APP_LAYERS) {
1419 ALOGI("%s: Number of App layers exceeded the limit ",
1420 __FUNCTION__);
1421 mCachedFrame.reset();
1422 return -1;
1423 }
1424
Saurabh Shahb39f8152013-08-22 10:21:44 -07001425 //reset old data
1426 mCurrentFrame.reset(numLayers);
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001427 memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
1428 mCurrentFrame.dropCount = 0;
Prabhanjan Kandula088bd892013-07-02 23:47:13 +05301429
Ramkumar Radhakrishnana70981a2013-08-28 11:33:53 -07001430 // Detect the start of animation and fall back to GPU only once to cache
1431    // all the layers in FB and display FB content until animation completes.
1432 if(ctx->listStats[mDpy].isDisplayAnimating) {
1433 mCurrentFrame.needsRedraw = false;
1434 if(ctx->mAnimationState[mDpy] == ANIMATION_STOPPED) {
1435 mCurrentFrame.needsRedraw = true;
1436 ctx->mAnimationState[mDpy] = ANIMATION_STARTED;
1437 }
1438 setMDPCompLayerFlags(ctx, list);
1439 mCachedFrame.updateCounts(mCurrentFrame);
1440 ret = -1;
1441 return ret;
1442 } else {
1443 ctx->mAnimationState[mDpy] = ANIMATION_STOPPED;
1444 }
1445
Saurabh Shahb39f8152013-08-22 10:21:44 -07001446 //Hard conditions, if not met, cannot do MDP comp
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001447 if(isFrameDoable(ctx)) {
1448 generateROI(ctx, list);
Saurabh Shahb39f8152013-08-22 10:21:44 -07001449
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001450 //Convert from kbps to gbps
1451 sMaxBw = mdpVersion.getHighBw() / 1000000.0;
1452 if (ctx->mExtDisplay->isConnected() ||
1453 ctx->mMDP.panel != MIPI_CMD_PANEL) {
1454 sMaxBw = mdpVersion.getLowBw() / 1000000.0;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001455 }
1456
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001457 if(tryFullFrame(ctx, list) || tryVideoOnly(ctx, list)) {
1458 setMDPCompLayerFlags(ctx, list);
1459 } else {
1460 reset(ctx);
1461 memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
1462 mCurrentFrame.dropCount = 0;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001463 ret = -1;
Saurabh Shahb39f8152013-08-22 10:21:44 -07001464 }
1465 } else {
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001466 ALOGD_IF( isDebug(),"%s: MDP Comp not possible for this frame",
1467 __FUNCTION__);
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001468 ret = -1;
Saurabh Shahb39f8152013-08-22 10:21:44 -07001469 }
Saurabh Shahb39f8152013-08-22 10:21:44 -07001470
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001471 if(isDebug()) {
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001472 ALOGD("GEOMETRY change: %d",
1473 (list->flags & HWC_GEOMETRY_CHANGED));
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001474 android::String8 sDump("");
1475 dump(sDump);
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001476 ALOGD("%s",sDump.string());
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001477 }
1478
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001479 mCachedFrame.cacheAll(list);
1480 mCachedFrame.updateCounts(mCurrentFrame);
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001481 double panelRefRate =
1482 1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;
1483 sBwClaimed += calcMDPBytesRead(ctx, list) * panelRefRate;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001484 return ret;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001485}
1486
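/*
 * Reserves two VG pipes on the default mixer for the left and right halves
 * of a source-split 4kx2k YUV layer; returns false if either pipe cannot
 * be acquired.
 */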
radhakrishnac9a67412013-09-25 17:40:42 +05301487bool MDPComp::allocSplitVGPipesfor4k2k(hwc_context_t *ctx,
1488 hwc_display_contents_1_t* list, int index) {
1489
1490 bool bRet = true;
1491 hwc_layer_1_t* layer = &list->hwLayers[index];
1492 private_handle_t *hnd = (private_handle_t *)layer->handle;
1493 int mdpIndex = mCurrentFrame.layerToMDP[index];
1494 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
1495 info.pipeInfo = new MdpYUVPipeInfo;
1496 info.rot = NULL;
1497 MdpYUVPipeInfo& pipe_info = *(MdpYUVPipeInfo*)info.pipeInfo;
1498 ePipeType type = MDPCOMP_OV_VG;
1499
1500 pipe_info.lIndex = ovutils::OV_INVALID;
1501 pipe_info.rIndex = ovutils::OV_INVALID;
1502
1503 pipe_info.lIndex = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
1504 if(pipe_info.lIndex == ovutils::OV_INVALID){
1505 bRet = false;
1506 ALOGD_IF(isDebug(),"%s: allocating first VG pipe failed",
1507 __FUNCTION__);
1508 }
1509 pipe_info.rIndex = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
1510 if(pipe_info.rIndex == ovutils::OV_INVALID){
1511 bRet = false;
1512 ALOGD_IF(isDebug(),"%s: allocating second VG pipe failed",
1513 __FUNCTION__);
1514 }
1515 return bRet;
1516}
Saurabh Shah88e4d272013-09-03 13:31:29 -07001517//=============MDPCompNonSplit===================================================
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001518
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001519void MDPCompNonSplit::adjustForSourceSplit(hwc_context_t *ctx,
radhakrishnac9a67412013-09-25 17:40:42 +05301520 hwc_display_contents_1_t* list){
1521    //As we split the 4kx2k yuv layer and program it to 2 VG pipes
1522    //(if available), increase mdpCount accordingly
1523 mCurrentFrame.mdpCount += ctx->listStats[mDpy].yuv4k2kCount;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001524
1525    //If a 4k2k YUV layer can be split and fbZ lies above that layer,
1526    //increment the FB z-order by 1, since splitting the layer
1527    //consumes an extra z-order for its right half
1529 if(mCurrentFrame.fbZ >= 0) {
1530 int n4k2kYuvCount = ctx->listStats[mDpy].yuv4k2kCount;
1531 for(int index = 0; index < n4k2kYuvCount; index++){
1532 int n4k2kYuvIndex =
1533 ctx->listStats[mDpy].yuv4k2kIndices[index];
1534 if(mCurrentFrame.fbZ > n4k2kYuvIndex){
1535 mCurrentFrame.fbZ += 1;
1536 }
1537 }
1538 }
radhakrishnac9a67412013-09-25 17:40:42 +05301539}
1540
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001541/*
1542 * Configures pipe(s) for MDP composition
1543 */
Saurabh Shah88e4d272013-09-03 13:31:29 -07001544int MDPCompNonSplit::configure(hwc_context_t *ctx, hwc_layer_1_t *layer,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001545 PipeLayerPair& PipeLayerPair) {
Saurabh Shah88e4d272013-09-03 13:31:29 -07001546 MdpPipeInfoNonSplit& mdp_info =
1547 *(static_cast<MdpPipeInfoNonSplit*>(PipeLayerPair.pipeInfo));
Saurabh Shahacf10202013-02-26 10:15:15 -08001548 eMdpFlags mdpFlags = OV_MDP_BACKEND_COMPOSITION;
1549 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1550 eIsFg isFg = IS_FG_OFF;
1551 eDest dest = mdp_info.index;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001552
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001553 ALOGD_IF(isDebug(),"%s: configuring: layer: %p z_order: %d dest_pipe: %d",
1554 __FUNCTION__, layer, zOrder, dest);
1555
Saurabh Shah88e4d272013-09-03 13:31:29 -07001556 return configureNonSplit(ctx, layer, mDpy, mdpFlags, zOrder, isFg, dest,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001557 &PipeLayerPair.rot);
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001558}
1559
Saurabh Shah88e4d272013-09-03 13:31:29 -07001560bool MDPCompNonSplit::arePipesAvailable(hwc_context_t *ctx,
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001561 hwc_display_contents_1_t* list) {
1562 overlay::Overlay& ov = *ctx->mOverlay;
1563 int numPipesNeeded = mCurrentFrame.mdpCount;
1564 int availPipes = ov.availablePipes(mDpy, Overlay::MIXER_DEFAULT);
1565
1566 //Reserve pipe for FB
1567 if(mCurrentFrame.fbCount)
1568 availPipes -= 1;
1569
1570 if(numPipesNeeded > availPipes) {
1571 ALOGD_IF(isDebug(), "%s: Insufficient pipes, dpy %d needed %d, avail %d",
1572 __FUNCTION__, mDpy, numPipesNeeded, availPipes);
1573 return false;
1574 }
1575
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001576 if(not areVGPipesAvailable(ctx, list)) {
1577 return false;
1578 }
1579
1580 return true;
1581}
1582
1583bool MDPCompNonSplit::areVGPipesAvailable(hwc_context_t *ctx,
1584 hwc_display_contents_1_t* list) {
1585 overlay::Overlay& ov = *ctx->mOverlay;
1586 int pipesNeeded = 0;
1587 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1588 if(!mCurrentFrame.isFBComposed[i]) {
1589 hwc_layer_1_t* layer = &list->hwLayers[i];
1590 hwc_rect_t dst = layer->displayFrame;
1591 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301592 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1593 pipesNeeded = pipesNeeded + 2;
1594 }
1595 else if(isYuvBuffer(hnd)) {
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001596 pipesNeeded++;
1597 }
1598 }
1599 }
1600
1601 int availableVGPipes = ov.availablePipes(mDpy, ovutils::OV_MDP_PIPE_VG);
1602 if(pipesNeeded > availableVGPipes) {
1603 ALOGD_IF(isDebug(), "%s: Insufficient VG pipes for video layers"
1604                " dpy %d needed %d, avail %d",
1605 __FUNCTION__, mDpy, pipesNeeded, availableVGPipes);
1606 return false;
1607 }
1608
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001609 return true;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001610}
1611
Saurabh Shah88e4d272013-09-03 13:31:29 -07001612bool MDPCompNonSplit::allocLayerPipes(hwc_context_t *ctx,
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001613 hwc_display_contents_1_t* list) {
1614 for(int index = 0; index < mCurrentFrame.layerCount; index++) {
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001615
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001616 if(mCurrentFrame.isFBComposed[index]) continue;
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001617
Jeykumar Sankarancf537002013-01-21 21:19:15 -08001618 hwc_layer_1_t* layer = &list->hwLayers[index];
1619 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301620 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1621 if(allocSplitVGPipesfor4k2k(ctx, list, index)){
1622 continue;
1623 }
1624 }
1625
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001626 int mdpIndex = mCurrentFrame.layerToMDP[index];
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001627 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
Saurabh Shah88e4d272013-09-03 13:31:29 -07001628 info.pipeInfo = new MdpPipeInfoNonSplit;
Saurabh Shahacf10202013-02-26 10:15:15 -08001629 info.rot = NULL;
Saurabh Shah88e4d272013-09-03 13:31:29 -07001630 MdpPipeInfoNonSplit& pipe_info = *(MdpPipeInfoNonSplit*)info.pipeInfo;
Jeykumar Sankarana37fdbf2013-03-06 18:59:28 -08001631 ePipeType type = MDPCOMP_OV_ANY;
1632
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001633 if(isYuvBuffer(hnd)) {
1634 type = MDPCOMP_OV_VG;
Prabhanjan Kandula47191dc2014-01-22 23:01:45 +05301635 } else if(qdutils::MDPVersion::getInstance().is8x26() &&
1636 (ctx->dpyAttr[HWC_DISPLAY_PRIMARY].xres > 1024)) {
1637 if(qhwc::needsScaling(layer))
1638 type = MDPCOMP_OV_RGB;
Prabhanjan Kandula21918db2013-11-26 15:51:58 +05301639 } else if(!qhwc::needsScaling(layer)
Saurabh Shah85234ec2013-04-12 17:09:00 -07001640 && Overlay::getDMAMode() != Overlay::DMA_BLOCK_MODE
1641 && ctx->mMDP.version >= qdutils::MDSS_V5) {
Jeykumar Sankarana37fdbf2013-03-06 18:59:28 -08001642 type = MDPCOMP_OV_DMA;
1643 }
1644
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001645 pipe_info.index = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001646 if(pipe_info.index == ovutils::OV_INVALID) {
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001647 ALOGD_IF(isDebug(), "%s: Unable to get pipe type = %d",
1648 __FUNCTION__, (int) type);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001649 return false;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001650 }
1651 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001652 return true;
1653}
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001654
radhakrishnac9a67412013-09-25 17:40:42 +05301655int MDPCompNonSplit::configure4k2kYuv(hwc_context_t *ctx, hwc_layer_1_t *layer,
1656 PipeLayerPair& PipeLayerPair) {
1657 MdpYUVPipeInfo& mdp_info =
1658 *(static_cast<MdpYUVPipeInfo*>(PipeLayerPair.pipeInfo));
1659 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1660 eIsFg isFg = IS_FG_OFF;
1661 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1662 eDest lDest = mdp_info.lIndex;
1663 eDest rDest = mdp_info.rIndex;
1664
1665 return configureSourceSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg,
1666 lDest, rDest, &PipeLayerPair.rot);
1667}
1668
Saurabh Shah88e4d272013-09-03 13:31:29 -07001669bool MDPCompNonSplit::draw(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001670
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001671 if(!isEnabled()) {
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001672 ALOGD_IF(isDebug(),"%s: MDP Comp not configured", __FUNCTION__);
1673 return true;
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -08001674 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001675
1676 if(!ctx || !list) {
1677        ALOGE("%s: invalid context or list",__FUNCTION__);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001678 return false;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001679 }
1680
Prabhanjan Kandula08222fc2013-07-10 17:20:59 +05301681 if(ctx->listStats[mDpy].numAppLayers > MAX_NUM_APP_LAYERS) {
1682 ALOGD_IF(isDebug(),"%s: Exceeding max layer count", __FUNCTION__);
1683 return true;
1684 }
1685
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001686 /* reset Invalidator */
Saurabh Shah2d998a92013-05-14 17:55:58 -07001687 if(idleInvalidator && !sIdleFallBack && mCurrentFrame.mdpCount)
Saurabh Shahb2117fe2014-01-23 18:39:01 -08001688 idleInvalidator->handleUpdateEvent();
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001689
1690 overlay::Overlay& ov = *ctx->mOverlay;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001691 LayerProp *layerProp = ctx->layerProp[mDpy];
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001692
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001693 int numHwLayers = ctx->listStats[mDpy].numAppLayers;
1694 for(int i = 0; i < numHwLayers && mCurrentFrame.mdpCount; i++ )
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001695 {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001696 if(mCurrentFrame.isFBComposed[i]) continue;
1697
Naseer Ahmed5b6708a2012-08-02 13:46:08 -07001698 hwc_layer_1_t *layer = &list->hwLayers[i];
Saurabh Shahacf10202013-02-26 10:15:15 -08001699 private_handle_t *hnd = (private_handle_t *)layer->handle;
1700 if(!hnd) {
Sushil Chauhan897a9c32013-07-18 11:09:55 -07001701 if (!(layer->flags & HWC_COLOR_FILL)) {
1702 ALOGE("%s handle null", __FUNCTION__);
1703 return false;
1704 }
1705 // No PLAY for Color layer
1706 layerProp[i].mFlags &= ~HWC_MDPCOMP;
1707 continue;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001708 }
1709
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001710 int mdpIndex = mCurrentFrame.layerToMDP[i];
1711
radhakrishnac9a67412013-09-25 17:40:42 +05301712 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit)
1713 {
1714 MdpYUVPipeInfo& pipe_info =
1715 *(MdpYUVPipeInfo*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
1716 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
1717 ovutils::eDest indexL = pipe_info.lIndex;
1718 ovutils::eDest indexR = pipe_info.rIndex;
1719 int fd = hnd->fd;
1720 uint32_t offset = hnd->offset;
1721 if(rot) {
1722 rot->queueBuffer(fd, offset);
1723 fd = rot->getDstMemId();
1724 offset = rot->getDstOffset();
1725 }
1726 if(indexL != ovutils::OV_INVALID) {
1727 ovutils::eDest destL = (ovutils::eDest)indexL;
1728 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1729 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
1730 if (!ov.queueBuffer(fd, offset, destL)) {
1731 ALOGE("%s: queueBuffer failed for display:%d",
1732 __FUNCTION__, mDpy);
1733 return false;
1734 }
1735 }
1736
1737 if(indexR != ovutils::OV_INVALID) {
1738 ovutils::eDest destR = (ovutils::eDest)indexR;
1739 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1740 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
1741 if (!ov.queueBuffer(fd, offset, destR)) {
1742 ALOGE("%s: queueBuffer failed for display:%d",
1743 __FUNCTION__, mDpy);
1744 return false;
1745 }
1746 }
1747 }
1748 else{
1749 MdpPipeInfoNonSplit& pipe_info =
Saurabh Shah88e4d272013-09-03 13:31:29 -07001750 *(MdpPipeInfoNonSplit*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
radhakrishnac9a67412013-09-25 17:40:42 +05301751 ovutils::eDest dest = pipe_info.index;
1752 if(dest == ovutils::OV_INVALID) {
1753 ALOGE("%s: Invalid pipe index (%d)", __FUNCTION__, dest);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001754 return false;
radhakrishnac9a67412013-09-25 17:40:42 +05301755 }
Saurabh Shahacf10202013-02-26 10:15:15 -08001756
radhakrishnac9a67412013-09-25 17:40:42 +05301757 if(!(layerProp[i].mFlags & HWC_MDPCOMP)) {
1758 continue;
1759 }
1760
1761 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1762 using pipe: %d", __FUNCTION__, layer,
1763 hnd, dest );
1764
1765 int fd = hnd->fd;
1766 uint32_t offset = hnd->offset;
1767
1768 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
1769 if(rot) {
1770 if(!rot->queueBuffer(fd, offset))
1771 return false;
1772 fd = rot->getDstMemId();
1773 offset = rot->getDstOffset();
1774 }
1775
1776 if (!ov.queueBuffer(fd, offset, dest)) {
1777 ALOGE("%s: queueBuffer failed for display:%d ",
1778 __FUNCTION__, mDpy);
1779 return false;
1780 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001781 }
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001782
1783 layerProp[i].mFlags &= ~HWC_MDPCOMP;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001784 }
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001785 return true;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001786}
1787
Saurabh Shah88e4d272013-09-03 13:31:29 -07001788//=============MDPCompSplit===================================================
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001789
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001790void MDPCompSplit::adjustForSourceSplit(hwc_context_t *ctx,
radhakrishnac9a67412013-09-25 17:40:42 +05301791 hwc_display_contents_1_t* list){
1792 //if 4kx2k yuv layer is totally present in either in left half
1793 //or right half then try splitting the yuv layer to avoid decimation
1794 int n4k2kYuvCount = ctx->listStats[mDpy].yuv4k2kCount;
1795 const int lSplit = getLeftSplit(ctx, mDpy);
1796 for(int index = 0; index < n4k2kYuvCount; index++){
1797 int n4k2kYuvIndex = ctx->listStats[mDpy].yuv4k2kIndices[index];
1798 hwc_layer_1_t* layer = &list->hwLayers[n4k2kYuvIndex];
1799 hwc_rect_t dst = layer->displayFrame;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001800 if((dst.left > lSplit) || (dst.right < lSplit)) {
radhakrishnac9a67412013-09-25 17:40:42 +05301801 mCurrentFrame.mdpCount += 1;
1802 }
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001803 if(mCurrentFrame.fbZ > n4k2kYuvIndex){
1804 mCurrentFrame.fbZ += 1;
1805 }
radhakrishnac9a67412013-09-25 17:40:42 +05301806 }
1807}
1808
Saurabh Shah88e4d272013-09-03 13:31:29 -07001809int MDPCompSplit::pipesNeeded(hwc_context_t *ctx,
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001810 hwc_display_contents_1_t* list,
1811 int mixer) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001812 int pipesNeeded = 0;
Saurabh Shah67a38c32013-06-10 16:23:15 -07001813 const int xres = ctx->dpyAttr[mDpy].xres;
Saurabh Shah07a8ca82013-08-06 18:45:42 -07001814
1815 const int lSplit = getLeftSplit(ctx, mDpy);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001816
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001817 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1818 if(!mCurrentFrame.isFBComposed[i]) {
1819 hwc_layer_1_t* layer = &list->hwLayers[i];
1820 hwc_rect_t dst = layer->displayFrame;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001821 if(mixer == Overlay::MIXER_LEFT && dst.left < lSplit) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001822 pipesNeeded++;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001823 } else if(mixer == Overlay::MIXER_RIGHT && dst.right > lSplit) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001824 pipesNeeded++;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001825 }
1826 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001827 }
1828 return pipesNeeded;
1829}
1830
Saurabh Shah88e4d272013-09-03 13:31:29 -07001831bool MDPCompSplit::arePipesAvailable(hwc_context_t *ctx,
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001832 hwc_display_contents_1_t* list) {
1833 overlay::Overlay& ov = *ctx->mOverlay;
Saurabh Shah082468e2013-09-12 10:05:32 -07001834 int totalPipesNeeded = 0;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001835
1836 for(int i = 0; i < Overlay::MIXER_MAX; i++) {
1837 int numPipesNeeded = pipesNeeded(ctx, list, i);
1838 int availPipes = ov.availablePipes(mDpy, i);
1839
1840 //Reserve pipe(s)for FB
1841        //Reserve pipe(s) for FB
Saurabh Shah082468e2013-09-12 10:05:32 -07001842 numPipesNeeded += 1;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001843
Saurabh Shah082468e2013-09-12 10:05:32 -07001844 totalPipesNeeded += numPipesNeeded;
1845
1846 //Per mixer check.
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001847 if(numPipesNeeded > availPipes) {
1848 ALOGD_IF(isDebug(), "%s: Insufficient pipes for "
1849 "dpy %d mixer %d needed %d, avail %d",
1850 __FUNCTION__, mDpy, i, numPipesNeeded, availPipes);
1851 return false;
1852 }
1853 }
Saurabh Shah082468e2013-09-12 10:05:32 -07001854
1855 //Per display check, since unused pipes can get counted twice.
1856 int totalPipesAvailable = ov.availablePipes(mDpy);
1857 if(totalPipesNeeded > totalPipesAvailable) {
1858 ALOGD_IF(isDebug(), "%s: Insufficient pipes for "
1859 "dpy %d needed %d, avail %d",
1860 __FUNCTION__, mDpy, totalPipesNeeded, totalPipesAvailable);
1861 return false;
1862 }
1863
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001864 if(not areVGPipesAvailable(ctx, list)) {
1865 return false;
1866 }
1867
1868 return true;
1869}
1870
1871bool MDPCompSplit::areVGPipesAvailable(hwc_context_t *ctx,
1872 hwc_display_contents_1_t* list) {
1873 overlay::Overlay& ov = *ctx->mOverlay;
1874 int pipesNeeded = 0;
1875 const int lSplit = getLeftSplit(ctx, mDpy);
1876 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1877 if(!mCurrentFrame.isFBComposed[i]) {
1878 hwc_layer_1_t* layer = &list->hwLayers[i];
1879 hwc_rect_t dst = layer->displayFrame;
1880 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301881 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1882 if((dst.left > lSplit)||(dst.right < lSplit)){
1883 pipesNeeded = pipesNeeded + 2;
1884 continue;
1885 }
1886 }
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001887 if(isYuvBuffer(hnd)) {
1888 if(dst.left < lSplit) {
1889 pipesNeeded++;
1890 }
1891 if(dst.right > lSplit) {
1892 pipesNeeded++;
1893 }
1894 }
1895 }
1896 }
1897
1898 int availableVGPipes = ov.availablePipes(mDpy, ovutils::OV_MDP_PIPE_VG);
1899 if(pipesNeeded > availableVGPipes) {
1900 ALOGD_IF(isDebug(), "%s: Insufficient VG pipes for video layers"
1901                " dpy %d needed %d, avail %d",
1902 __FUNCTION__, mDpy, pipesNeeded, availableVGPipes);
1903 return false;
1904 }
1905
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001906 return true;
1907}
1908
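/*
 * For split panels, acquires a pipe on the left and/or right mixer
 * depending on which side(s) of the panel split the layer's destination
 * rect crosses.
 */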
Saurabh Shah88e4d272013-09-03 13:31:29 -07001909bool MDPCompSplit::acquireMDPPipes(hwc_context_t *ctx, hwc_layer_1_t* layer,
1910 MdpPipeInfoSplit& pipe_info,
Saurabh Shah67a38c32013-06-10 16:23:15 -07001911 ePipeType type) {
1912 const int xres = ctx->dpyAttr[mDpy].xres;
Saurabh Shah07a8ca82013-08-06 18:45:42 -07001913 const int lSplit = getLeftSplit(ctx, mDpy);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001914
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001915 hwc_rect_t dst = layer->displayFrame;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001916 pipe_info.lIndex = ovutils::OV_INVALID;
1917 pipe_info.rIndex = ovutils::OV_INVALID;
1918
1919 if (dst.left < lSplit) {
1920 pipe_info.lIndex = getMdpPipe(ctx, type, Overlay::MIXER_LEFT);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001921 if(pipe_info.lIndex == ovutils::OV_INVALID)
1922 return false;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001923 }
1924
1925 if(dst.right > lSplit) {
1926 pipe_info.rIndex = getMdpPipe(ctx, type, Overlay::MIXER_RIGHT);
1927 if(pipe_info.rIndex == ovutils::OV_INVALID)
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001928 return false;
1929 }
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001930
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001931 return true;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001932}
1933
Saurabh Shah88e4d272013-09-03 13:31:29 -07001934bool MDPCompSplit::allocLayerPipes(hwc_context_t *ctx,
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001935 hwc_display_contents_1_t* list) {
1936 for(int index = 0 ; index < mCurrentFrame.layerCount; index++) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001937
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001938 if(mCurrentFrame.isFBComposed[index]) continue;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001939
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001940 hwc_layer_1_t* layer = &list->hwLayers[index];
1941 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301942 hwc_rect_t dst = layer->displayFrame;
1943 const int lSplit = getLeftSplit(ctx, mDpy);
1944 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1945 if((dst.left > lSplit)||(dst.right < lSplit)){
1946 if(allocSplitVGPipesfor4k2k(ctx, list, index)){
1947 continue;
1948 }
1949 }
1950 }
Saurabh Shah0d65dbe2013-06-06 18:33:16 -07001951 int mdpIndex = mCurrentFrame.layerToMDP[index];
1952 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
Saurabh Shah88e4d272013-09-03 13:31:29 -07001953 info.pipeInfo = new MdpPipeInfoSplit;
Saurabh Shah9e3adb22013-03-26 11:16:27 -07001954 info.rot = NULL;
Saurabh Shah88e4d272013-09-03 13:31:29 -07001955 MdpPipeInfoSplit& pipe_info = *(MdpPipeInfoSplit*)info.pipeInfo;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001956 ePipeType type = MDPCOMP_OV_ANY;
1957
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001958 if(isYuvBuffer(hnd)) {
1959 type = MDPCOMP_OV_VG;
Sushil Chauhan15a2ea62013-09-04 18:28:36 -07001960 } else if(!qhwc::needsScalingWithSplit(ctx, layer, mDpy)
Saurabh Shah85234ec2013-04-12 17:09:00 -07001961 && Overlay::getDMAMode() != Overlay::DMA_BLOCK_MODE
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001962 && ctx->mMDP.version >= qdutils::MDSS_V5) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001963 type = MDPCOMP_OV_DMA;
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001964 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001965
1966 if(!acquireMDPPipes(ctx, layer, pipe_info, type)) {
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001967 ALOGD_IF(isDebug(), "%s: Unable to get pipe for type = %d",
1968 __FUNCTION__, (int) type);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001969 return false;
1970 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001971 }
1972 return true;
1973}
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001974
radhakrishnac9a67412013-09-25 17:40:42 +05301975int MDPCompSplit::configure4k2kYuv(hwc_context_t *ctx, hwc_layer_1_t *layer,
1976 PipeLayerPair& PipeLayerPair) {
1977 const int lSplit = getLeftSplit(ctx, mDpy);
1978 hwc_rect_t dst = layer->displayFrame;
1979 if((dst.left > lSplit)||(dst.right < lSplit)){
1980 MdpYUVPipeInfo& mdp_info =
1981 *(static_cast<MdpYUVPipeInfo*>(PipeLayerPair.pipeInfo));
1982 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1983 eIsFg isFg = IS_FG_OFF;
1984 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1985 eDest lDest = mdp_info.lIndex;
1986 eDest rDest = mdp_info.rIndex;
1987
1988 return configureSourceSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg,
1989 lDest, rDest, &PipeLayerPair.rot);
1990 }
1991 else{
1992 return configure(ctx, layer, PipeLayerPair);
1993 }
1994}
1995
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001996/*
1997 * Configures pipe(s) for MDP composition
1998 */
Saurabh Shah88e4d272013-09-03 13:31:29 -07001999int MDPCompSplit::configure(hwc_context_t *ctx, hwc_layer_1_t *layer,
Saurabh Shah67a38c32013-06-10 16:23:15 -07002000 PipeLayerPair& PipeLayerPair) {
Saurabh Shah88e4d272013-09-03 13:31:29 -07002001 MdpPipeInfoSplit& mdp_info =
2002 *(static_cast<MdpPipeInfoSplit*>(PipeLayerPair.pipeInfo));
Saurabh Shahacf10202013-02-26 10:15:15 -08002003 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
2004 eIsFg isFg = IS_FG_OFF;
2005 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
2006 eDest lDest = mdp_info.lIndex;
2007 eDest rDest = mdp_info.rIndex;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002008
2009 ALOGD_IF(isDebug(),"%s: configuring: layer: %p z_order: %d dest_pipeL: %d"
2010 "dest_pipeR: %d",__FUNCTION__, layer, zOrder, lDest, rDest);
2011
Saurabh Shah88e4d272013-09-03 13:31:29 -07002012 return configureSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg, lDest,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002013 rDest, &PipeLayerPair.rot);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002014}
2015
Saurabh Shah88e4d272013-09-03 13:31:29 -07002016bool MDPCompSplit::draw(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002017
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002018 if(!isEnabled()) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002019 ALOGD_IF(isDebug(),"%s: MDP Comp not configured", __FUNCTION__);
2020 return true;
2021 }
2022
2023 if(!ctx || !list) {
2024        ALOGE("%s: invalid context or list",__FUNCTION__);
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002025 return false;
2026 }
2027
Prabhanjan Kandula08222fc2013-07-10 17:20:59 +05302028 if(ctx->listStats[mDpy].numAppLayers > MAX_NUM_APP_LAYERS) {
2029 ALOGD_IF(isDebug(),"%s: Exceeding max layer count", __FUNCTION__);
2030 return true;
2031 }
2032
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002033 /* reset Invalidator */
Saurabh Shah2d998a92013-05-14 17:55:58 -07002034 if(idleInvalidator && !sIdleFallBack && mCurrentFrame.mdpCount)
Saurabh Shahb2117fe2014-01-23 18:39:01 -08002035 idleInvalidator->handleUpdateEvent();
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002036
Naseer Ahmed54821fe2012-11-28 18:44:38 -05002037 overlay::Overlay& ov = *ctx->mOverlay;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002038 LayerProp *layerProp = ctx->layerProp[mDpy];
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002039
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002040 int numHwLayers = ctx->listStats[mDpy].numAppLayers;
2041 for(int i = 0; i < numHwLayers && mCurrentFrame.mdpCount; i++ )
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002042 {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002043 if(mCurrentFrame.isFBComposed[i]) continue;
2044
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002045 hwc_layer_1_t *layer = &list->hwLayers[i];
Saurabh Shahacf10202013-02-26 10:15:15 -08002046 private_handle_t *hnd = (private_handle_t *)layer->handle;
2047 if(!hnd) {
2048 ALOGE("%s handle null", __FUNCTION__);
2049 return false;
2050 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002051
2052 if(!(layerProp[i].mFlags & HWC_MDPCOMP)) {
2053 continue;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002054 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002055
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002056 int mdpIndex = mCurrentFrame.layerToMDP[i];
2057
radhakrishnac9a67412013-09-25 17:40:42 +05302058 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit)
2059 {
2060 MdpYUVPipeInfo& pipe_info =
2061 *(MdpYUVPipeInfo*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
2062 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
2063 ovutils::eDest indexL = pipe_info.lIndex;
2064 ovutils::eDest indexR = pipe_info.rIndex;
2065 int fd = hnd->fd;
2066 uint32_t offset = hnd->offset;
2067 if(rot) {
2068 rot->queueBuffer(fd, offset);
2069 fd = rot->getDstMemId();
2070 offset = rot->getDstOffset();
2071 }
2072 if(indexL != ovutils::OV_INVALID) {
2073 ovutils::eDest destL = (ovutils::eDest)indexL;
2074 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
2075 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
2076 if (!ov.queueBuffer(fd, offset, destL)) {
2077 ALOGE("%s: queueBuffer failed for display:%d",
2078 __FUNCTION__, mDpy);
2079 return false;
2080 }
2081 }
Saurabh Shahacf10202013-02-26 10:15:15 -08002082
radhakrishnac9a67412013-09-25 17:40:42 +05302083 if(indexR != ovutils::OV_INVALID) {
2084 ovutils::eDest destR = (ovutils::eDest)indexR;
2085 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
2086 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
2087 if (!ov.queueBuffer(fd, offset, destR)) {
2088 ALOGE("%s: queueBuffer failed for display:%d",
2089 __FUNCTION__, mDpy);
2090 return false;
2091 }
Saurabh Shaha9da08f2013-07-03 13:27:53 -07002092 }
2093 }
radhakrishnac9a67412013-09-25 17:40:42 +05302094 else{
2095 MdpPipeInfoSplit& pipe_info =
2096 *(MdpPipeInfoSplit*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
2097 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
Saurabh Shaha9da08f2013-07-03 13:27:53 -07002098
radhakrishnac9a67412013-09-25 17:40:42 +05302099 ovutils::eDest indexL = pipe_info.lIndex;
2100 ovutils::eDest indexR = pipe_info.rIndex;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002101
radhakrishnac9a67412013-09-25 17:40:42 +05302102 int fd = hnd->fd;
2103 int offset = hnd->offset;
2104
2105 if(ctx->mAD->isModeOn()) {
2106 if(ctx->mAD->draw(ctx, fd, offset)) {
2107 fd = ctx->mAD->getDstFd(ctx);
2108 offset = ctx->mAD->getDstOffset(ctx);
2109 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002110 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002111
radhakrishnac9a67412013-09-25 17:40:42 +05302112 if(rot) {
2113 rot->queueBuffer(fd, offset);
2114 fd = rot->getDstMemId();
2115 offset = rot->getDstOffset();
2116 }
2117
2118 //************* play left mixer **********
2119 if(indexL != ovutils::OV_INVALID) {
2120 ovutils::eDest destL = (ovutils::eDest)indexL;
2121 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
2122 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
2123 if (!ov.queueBuffer(fd, offset, destL)) {
2124 ALOGE("%s: queueBuffer failed for left mixer",
2125 __FUNCTION__);
2126 return false;
2127 }
2128 }
2129
2130 //************* play right mixer **********
2131 if(indexR != ovutils::OV_INVALID) {
2132 ovutils::eDest destR = (ovutils::eDest)indexR;
2133 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
2134 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
2135 if (!ov.queueBuffer(fd, offset, destR)) {
2136 ALOGE("%s: queueBuffer failed for right mixer",
2137 __FUNCTION__);
2138 return false;
2139 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002140 }
2141 }
Saurabh Shahacf10202013-02-26 10:15:15 -08002142
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002143 layerProp[i].mFlags &= ~HWC_MDPCOMP;
2144 }
Saurabh Shahacf10202013-02-26 10:15:15 -08002145
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002146 return true;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002147}
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002148}; //namespace
2149