/*
 * Copyright (C) 2012-2013, The Linux Foundation. All rights reserved.
 * Not a Contribution, Apache license notifications and license are retained
 * for attribution purposes only.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <math.h>
#include "hwc_mdpcomp.h"
#include <sys/ioctl.h>
#include "external.h"
#include "virtual.h"
#include "qdMetaData.h"
#include "mdp_version.h"
#include "hwc_fbupdate.h"
#include "hwc_ad.h"
#include <overlayRotator.h>

using namespace overlay;
using namespace qdutils;
using namespace overlay::utils;
namespace ovutils = overlay::utils;

namespace qhwc {

//==============MDPComp========================================================

IdleInvalidator *MDPComp::idleInvalidator = NULL;
bool MDPComp::sIdleFallBack = false;
bool MDPComp::sDebugLogs = false;
bool MDPComp::sEnabled = false;
bool MDPComp::sEnableMixedMode = true;
bool MDPComp::sEnablePartialFrameUpdate = false;
int MDPComp::sMaxPipesPerMixer = MAX_PIPES_PER_MIXER;
double MDPComp::sMaxBw = 0.0;
double MDPComp::sBwClaimed = 0.0;
bool MDPComp::sEnable4k2kYUVSplit = false;

MDPComp* MDPComp::getObject(hwc_context_t *ctx, const int& dpy) {
    if(isDisplaySplit(ctx, dpy)) {
        return new MDPCompSplit(dpy);
    }
    return new MDPCompNonSplit(dpy);
}

MDPComp::MDPComp(int dpy):mDpy(dpy){};

void MDPComp::dump(android::String8& buf)
{
    if(mCurrentFrame.layerCount > MAX_NUM_APP_LAYERS)
        return;

    dumpsys_log(buf,"HWC Map for Dpy: %s \n",
                (mDpy == 0) ? "\"PRIMARY\"" :
                (mDpy == 1) ? "\"EXTERNAL\"" : "\"VIRTUAL\"");
    dumpsys_log(buf,"CURR_FRAME: layerCount:%2d mdpCount:%2d "
                "fbCount:%2d \n", mCurrentFrame.layerCount,
                mCurrentFrame.mdpCount, mCurrentFrame.fbCount);
    dumpsys_log(buf,"needsFBRedraw:%3s pipesUsed:%2d MaxPipesPerMixer: %d \n",
                (mCurrentFrame.needsRedraw? "YES" : "NO"),
                mCurrentFrame.mdpCount, sMaxPipesPerMixer);
    dumpsys_log(buf," --------------------------------------------- \n");
    dumpsys_log(buf," listIdx | cached? | mdpIndex | comptype | Z \n");
    dumpsys_log(buf," --------------------------------------------- \n");
    for(int index = 0; index < mCurrentFrame.layerCount; index++ )
        dumpsys_log(buf," %7d | %7s | %8d | %9s | %2d \n",
                    index,
                    (mCurrentFrame.isFBComposed[index] ? "YES" : "NO"),
                    mCurrentFrame.layerToMDP[index],
                    (mCurrentFrame.isFBComposed[index] ?
                     (mCurrentFrame.drop[index] ? "DROP" :
                      (mCurrentFrame.needsRedraw ? "GLES" : "CACHE")) : "MDP"),
                    (mCurrentFrame.isFBComposed[index] ? mCurrentFrame.fbZ :
        mCurrentFrame.mdpToLayer[mCurrentFrame.layerToMDP[index]].pipeInfo->zOrder));
    dumpsys_log(buf,"\n");
}

bool MDPComp::init(hwc_context_t *ctx) {

    if(!ctx) {
        ALOGE("%s: Invalid hwc context!!",__FUNCTION__);
        return false;
    }

    char property[PROPERTY_VALUE_MAX];

    sEnabled = false;
    if((property_get("persist.hwc.mdpcomp.enable", property, NULL) > 0) &&
       (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
        (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
        sEnabled = true;
    }

    sEnableMixedMode = true;
    if((property_get("debug.mdpcomp.mixedmode.disable", property, NULL) > 0) &&
       (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
        (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
        sEnableMixedMode = false;
    }

    if(property_get("debug.mdpcomp.logs", property, NULL) > 0) {
        if(atoi(property) != 0)
            sDebugLogs = true;
    }

    if(property_get("persist.hwc.partialupdate.enable", property, NULL) > 0) {
        if((atoi(property) != 0) && ctx->mMDP.panel == MIPI_CMD_PANEL &&
           qdutils::MDPVersion::getInstance().is8x74v2())
            sEnablePartialFrameUpdate = true;
    }
    ALOGE_IF(isDebug(), "%s: Partial Update applicable?: %d",__FUNCTION__,
             sEnablePartialFrameUpdate);

    sMaxPipesPerMixer = MAX_PIPES_PER_MIXER;
    if(property_get("debug.mdpcomp.maxpermixer", property, "-1") > 0) {
        int val = atoi(property);
        if(val >= 0)
            sMaxPipesPerMixer = min(val, MAX_PIPES_PER_MIXER);
    }

    if(ctx->mMDP.panel != MIPI_CMD_PANEL) {
        // Idle invalidation is not necessary on command mode panels
        long idle_timeout = DEFAULT_IDLE_TIME;
        if(property_get("debug.mdpcomp.idletime", property, NULL) > 0) {
            if(atoi(property) != 0)
                idle_timeout = atoi(property);
        }

        //create Idle Invalidator only when not disabled through property
        if(idle_timeout != -1)
            idleInvalidator = IdleInvalidator::getInstance();

        if(idleInvalidator == NULL) {
            ALOGE("%s: failed to instantiate idleInvalidator object",
                  __FUNCTION__);
        } else {
            idleInvalidator->init(timeout_handler, ctx, idle_timeout);
        }
    }

    if((property_get("debug.mdpcomp.4k2kSplit", property, "0") > 0) &&
       (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
        (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
        sEnable4k2kYUVSplit = true;
    }
    return true;
}

void MDPComp::reset(hwc_context_t *ctx) {
    const int numLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numLayers);
    ctx->mOverlay->clear(mDpy);
    ctx->mLayerRotMap[mDpy]->clear();
}

void MDPComp::timeout_handler(void *udata) {
    struct hwc_context_t* ctx = (struct hwc_context_t*)(udata);

    if(!ctx) {
        ALOGE("%s: received empty data in timer callback", __FUNCTION__);
        return;
    }

    if(!ctx->proc) {
        ALOGE("%s: HWC proc not registered", __FUNCTION__);
        return;
    }
    sIdleFallBack = true;
    /* Trigger SF to redraw the current frame */
    ctx->proc->invalidate(ctx->proc);
}

void MDPComp::setMDPCompLayerFlags(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    LayerProp *layerProp = ctx->layerProp[mDpy];

    for(int index = 0; index < ctx->listStats[mDpy].numAppLayers; index++) {
        hwc_layer_1_t* layer = &(list->hwLayers[index]);
        if(!mCurrentFrame.isFBComposed[index]) {
            layerProp[index].mFlags |= HWC_MDPCOMP;
            layer->compositionType = HWC_OVERLAY;
            layer->hints |= HWC_HINT_CLEAR_FB;
        } else {
            /* Drop the layer when it's already present in FB OR when it lies
             * outside the frame's ROI */
            if(!mCurrentFrame.needsRedraw || mCurrentFrame.drop[index]) {
                layer->compositionType = HWC_OVERLAY;
            }
        }
    }
}

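// The FB target needs a redraw when the current composition differs from the
// cached frame, the geometry changed, or skip layers are present.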
void MDPComp::setRedraw(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    mCurrentFrame.needsRedraw = false;
    if(!mCachedFrame.isSameFrame(mCurrentFrame, list) ||
            (list->flags & HWC_GEOMETRY_CHANGED) ||
            isSkipPresent(ctx, mDpy)) {
        mCurrentFrame.needsRedraw = true;
    }
}

MDPComp::FrameInfo::FrameInfo() {
    reset(0);
}

void MDPComp::FrameInfo::reset(const int& numLayers) {
    for(int i = 0 ; i < MAX_PIPES_PER_MIXER && numLayers; i++ ) {
        if(mdpToLayer[i].pipeInfo) {
            delete mdpToLayer[i].pipeInfo;
            mdpToLayer[i].pipeInfo = NULL;
            //We don't own the rotator
            mdpToLayer[i].rot = NULL;
        }
    }

    memset(&mdpToLayer, 0, sizeof(mdpToLayer));
    memset(&layerToMDP, -1, sizeof(layerToMDP));
    memset(&isFBComposed, 1, sizeof(isFBComposed));

    layerCount = numLayers;
    fbCount = numLayers;
    mdpCount = 0;
    needsRedraw = true;
    fbZ = 0;
}

void MDPComp::FrameInfo::map() {
    // populate layer and MDP maps
    int mdpIdx = 0;
    for(int idx = 0; idx < layerCount; idx++) {
        if(!isFBComposed[idx]) {
            mdpToLayer[mdpIdx].listIndex = idx;
            layerToMDP[idx] = mdpIdx++;
        }
    }
}

MDPComp::LayerCache::LayerCache() {
    reset();
}

void MDPComp::LayerCache::reset() {
    memset(&hnd, 0, sizeof(hnd));
    memset(&isFBComposed, true, sizeof(isFBComposed));
    memset(&drop, false, sizeof(drop));
    layerCount = 0;
}

void MDPComp::LayerCache::cacheAll(hwc_display_contents_1_t* list) {
    const int numAppLayers = list->numHwLayers - 1;
    for(int i = 0; i < numAppLayers; i++) {
        hnd[i] = list->hwLayers[i].handle;
    }
}

void MDPComp::LayerCache::updateCounts(const FrameInfo& curFrame) {
    layerCount = curFrame.layerCount;
    memcpy(&isFBComposed, &curFrame.isFBComposed, sizeof(isFBComposed));
    memcpy(&drop, &curFrame.drop, sizeof(drop));
}

bool MDPComp::LayerCache::isSameFrame(const FrameInfo& curFrame,
        hwc_display_contents_1_t* list) {
    if(layerCount != curFrame.layerCount)
        return false;
    for(int i = 0; i < curFrame.layerCount; i++) {
        if((curFrame.isFBComposed[i] != isFBComposed[i]) ||
                (curFrame.drop[i] != drop[i])) {
            return false;
        }
        if(curFrame.isFBComposed[i] &&
           (hnd[i] != list->hwLayers[i].handle)){
            return false;
        }
    }
    return true;
}

bool MDPComp::isSupportedForMDPComp(hwc_context_t *ctx, hwc_layer_1_t* layer) {
    private_handle_t *hnd = (private_handle_t *)layer->handle;
    if((not isYuvBuffer(hnd) and has90Transform(layer)) or
        (not isValidDimension(ctx,layer))
        //More conditions here, SKIP, sRGB+Blend etc
        ) {
        return false;
    }
    return true;
}

bool MDPComp::isValidDimension(hwc_context_t *ctx, hwc_layer_1_t *layer) {
    const int dpy = HWC_DISPLAY_PRIMARY;
    private_handle_t *hnd = (private_handle_t *)layer->handle;

    if(!hnd) {
        if (layer->flags & HWC_COLOR_FILL) {
            // Color layer
            return true;
        }
        ALOGE("%s: layer handle is NULL", __FUNCTION__);
        return false;
    }

    //XXX: Investigate doing this with pixel phase on MDSS
    if(!isSecureBuffer(hnd) && isNonIntegralSourceCrop(layer->sourceCropf))
        return false;

    int hw_w = ctx->dpyAttr[mDpy].xres;
    int hw_h = ctx->dpyAttr[mDpy].yres;

    hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
    hwc_rect_t dst = layer->displayFrame;
    int crop_w = crop.right - crop.left;
    int crop_h = crop.bottom - crop.top;
    int dst_w = dst.right - dst.left;
    int dst_h = dst.bottom - dst.top;
    float w_dscale = ceilf((float)crop_w / (float)dst_w);
    float h_dscale = ceilf((float)crop_h / (float)dst_h);

    /* Workaround for an MDP HW limitation in DSI command mode panels where
     * FPS will not go beyond 30 if buffers on RGB pipes are of width or height
     * less than 5 pixels.
     * There is also a HW limitation in MDP: the minimum block size is 2x2,
     * so fall back to GPU if the height is less than 2.
     */
    if((crop_w < 5)||(crop_h < 5))
        return false;

    if((w_dscale > 1.0f) || (h_dscale > 1.0f)) {
        const uint32_t downscale =
            qdutils::MDPVersion::getInstance().getMaxMDPDownscale();
        if(ctx->mMDP.version >= qdutils::MDSS_V5) {
            /* Workaround for downscales larger than 4x.
             * Will be removed once decimator block is enabled for MDSS
             */
            if(!qdutils::MDPVersion::getInstance().supportsDecimation()) {
                if(crop_w > MAX_DISPLAY_DIM || w_dscale > downscale ||
                        h_dscale > downscale)
                    return false;
            } else {
                if(w_dscale > 64 || h_dscale > 64)
                    return false;
            }
        } else { //A-family
            if(w_dscale > downscale || h_dscale > downscale)
                return false;
        }
    }

    return true;
}

ovutils::eDest MDPComp::getMdpPipe(hwc_context_t *ctx, ePipeType type,
        int mixer) {
    overlay::Overlay& ov = *ctx->mOverlay;
    ovutils::eDest mdp_pipe = ovutils::OV_INVALID;

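    // Intentional fall-through below: a DMA request falls back to RGB and
    // then VG; ANY tries RGB then VG; an RGB-only request breaks out if no
    // RGB pipe is free.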
    switch(type) {
    case MDPCOMP_OV_DMA:
        mdp_pipe = ov.nextPipe(ovutils::OV_MDP_PIPE_DMA, mDpy, mixer);
        if(mdp_pipe != ovutils::OV_INVALID) {
            return mdp_pipe;
        }
    case MDPCOMP_OV_ANY:
    case MDPCOMP_OV_RGB:
        mdp_pipe = ov.nextPipe(ovutils::OV_MDP_PIPE_RGB, mDpy, mixer);
        if(mdp_pipe != ovutils::OV_INVALID) {
            return mdp_pipe;
        }

        if(type == MDPCOMP_OV_RGB) {
            //Requested only for RGB pipe
            break;
        }
    case MDPCOMP_OV_VG:
        return ov.nextPipe(ovutils::OV_MDP_PIPE_VG, mDpy, mixer);
    default:
        ALOGE("%s: Invalid pipe type",__FUNCTION__);
        return ovutils::OV_INVALID;
    };
    return ovutils::OV_INVALID;
}

bool MDPComp::isFrameDoable(hwc_context_t *ctx) {
    bool ret = true;
    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    if(!isEnabled()) {
        ALOGD_IF(isDebug(),"%s: MDP Comp. not enabled.", __FUNCTION__);
        ret = false;
    } else if(qdutils::MDPVersion::getInstance().is8x26() &&
            ctx->mVideoTransFlag && ctx->mVirtualDisplay->isConnected()) {
        //1 Padding round to shift pipes across mixers
        ALOGD_IF(isDebug(),"%s: MDP Comp. video transition padding round",
                __FUNCTION__);
        ret = false;
    } else if(ctx->dpyAttr[HWC_DISPLAY_EXTERNAL].isConfiguring ||
              ctx->dpyAttr[HWC_DISPLAY_VIRTUAL].isConfiguring) {
        ALOGD_IF( isDebug(),"%s: External Display connection is pending",
                __FUNCTION__);
        ret = false;
    } else if(ctx->isPaddingRound) {
        ctx->isPaddingRound = false;
        ALOGD_IF(isDebug(), "%s: padding round",__FUNCTION__);
        ret = false;
    }
    return ret;
}

/*
 * 1) Identify layers that are not visible in the updating ROI and drop them
 *    from composition.
 * 2) If any scaling layer would need cropping against the generated ROI,
 *    reset the ROI to the full resolution.
 */
bool MDPComp::validateAndApplyROI(hwc_context_t *ctx,
        hwc_display_contents_1_t* list, hwc_rect_t roi) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    if(!isValidRect(roi))
        return false;

    hwc_rect_t visibleRect = roi;

    for(int i = numAppLayers - 1; i >= 0; i--){

        if(!isValidRect(visibleRect)) {
            mCurrentFrame.drop[i] = true;
            mCurrentFrame.dropCount++;
        }

        const hwc_layer_1_t* layer = &list->hwLayers[i];

        hwc_rect_t dstRect = layer->displayFrame;
        hwc_rect_t srcRect = integerizeSourceCrop(layer->sourceCropf);
        int transform = layer->transform;

        hwc_rect_t res = getIntersection(visibleRect, dstRect);

        int res_w = res.right - res.left;
        int res_h = res.bottom - res.top;
        int dst_w = dstRect.right - dstRect.left;
        int dst_h = dstRect.bottom - dstRect.top;

        if(!isValidRect(res)) {
            mCurrentFrame.drop[i] = true;
            mCurrentFrame.dropCount++;
        } else {
            /* Reset the frame ROI when any layer which needs scaling also
             * needs ROI cropping */
            if((res_w != dst_w || res_h != dst_h) &&
                    needsScaling (layer)) {
                ALOGI("%s: Resetting ROI due to scaling", __FUNCTION__);
                memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
                mCurrentFrame.dropCount = 0;
                return false;
            }
        }

        if (layer->blending == HWC_BLENDING_NONE)
            visibleRect = deductRect(visibleRect, res);
    }
    return true;
}

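// Builds the frame ROI as the union of the display rects of layers whose
// buffers changed (or are YUV); falls back to a full-screen ROI if
// validation fails.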
void MDPComp::generateROI(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    if(!sEnablePartialFrameUpdate) {
        return;
    }

    if(mDpy || isDisplaySplit(ctx, mDpy)){
        ALOGE_IF(isDebug(), "%s: ROI not supported for "
                 "(1) external/virtual displays (2) dual-DSI split displays",
                 __FUNCTION__);
        return;
    }

    if(isSkipPresent(ctx, mDpy))
        return;

    if(list->flags & HWC_GEOMETRY_CHANGED)
        return;

    struct hwc_rect roi = (struct hwc_rect){0, 0, 0, 0};
    for(int index = 0; index < numAppLayers; index++ ) {
        if ((mCachedFrame.hnd[index] != list->hwLayers[index].handle) ||
            isYuvBuffer((private_handle_t *)list->hwLayers[index].handle)) {
            hwc_rect_t dstRect = list->hwLayers[index].displayFrame;
            hwc_rect_t srcRect = integerizeSourceCrop(
                                        list->hwLayers[index].sourceCropf);
            int transform = list->hwLayers[index].transform;

            /* Intersect against display boundaries */
            roi = getUnion(roi, dstRect);
        }
    }

    if(!validateAndApplyROI(ctx, list, roi)){
        roi = (struct hwc_rect) {0, 0,
            (int)ctx->dpyAttr[mDpy].xres, (int)ctx->dpyAttr[mDpy].yres};
    }

    ctx->listStats[mDpy].roi.x = roi.left;
    ctx->listStats[mDpy].roi.y = roi.top;
    ctx->listStats[mDpy].roi.w = roi.right - roi.left;
    ctx->listStats[mDpy].roi.h = roi.bottom - roi.top;

    ALOGD_IF(isDebug(),"%s: generated ROI: [%d, %d, %d, %d]", __FUNCTION__,
            roi.left, roi.top, roi.right, roi.bottom);
}

/* Checks for conditions where all the layers marked for MDP comp cannot be
 * bypassed. Under such conditions we try to bypass at least the YUV layers */
bool MDPComp::tryFullFrame(hwc_context_t *ctx,
        hwc_display_contents_1_t* list){

    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    if(sIdleFallBack && !ctx->listStats[mDpy].secureUI) {
        ALOGD_IF(isDebug(), "%s: Idle fallback dpy %d",__FUNCTION__, mDpy);
        return false;
    }

    if(isSkipPresent(ctx, mDpy)) {
        ALOGD_IF(isDebug(),"%s: SKIP present: %d",
                __FUNCTION__,
                isSkipPresent(ctx, mDpy));
        return false;
    }

    // check for action safe flag and downscale mode which requires scaling.
    if(ctx->dpyAttr[mDpy].mActionSafePresent
            || ctx->dpyAttr[mDpy].mDownScaleMode) {
        ALOGD_IF(isDebug(), "%s: Scaling needed for this frame",__FUNCTION__);
        return false;
    }

    for(int i = 0; i < numAppLayers; ++i) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        private_handle_t *hnd = (private_handle_t *)layer->handle;

        if(isYuvBuffer(hnd) && has90Transform(layer)) {
            if(!canUseRotator(ctx, mDpy)) {
                ALOGD_IF(isDebug(), "%s: Can't use rotator for dpy %d",
                        __FUNCTION__, mDpy);
                return false;
            }
        }

        //For 8x26 with panel width > 1k, fail MDP comp if an RGB layer needs
        //HFLIP; may not be needed if Gfx pre-rotation can handle all flips
        //and rotations.
        if(qdutils::MDPVersion::getInstance().is8x26() &&
                (ctx->dpyAttr[mDpy].xres > 1024) &&
                (layer->transform & HWC_TRANSFORM_FLIP_H) &&
                (!isYuvBuffer(hnd)))
            return false;
    }

    if(ctx->mAD->isDoable()) {
        return false;
    }

    //If all above hard conditions are met we can do full or partial MDP comp.
    bool ret = false;
    if(fullMDPComp(ctx, list)) {
        ret = true;
    } else if(partialMDPComp(ctx, list)) {
        ret = true;
    }

    return ret;
}

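// Attempts to place every app layer on an MDP pipe, leaving nothing for GLES.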
bool MDPComp::fullMDPComp(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
    //Will benefit presentation / secondary-only layer.
    if((mDpy > HWC_DISPLAY_PRIMARY) &&
            (list->numHwLayers - 1) > MAX_SEC_LAYERS) {
        ALOGD_IF(isDebug(), "%s: Exceeds max secondary pipes",__FUNCTION__);
        return false;
    }

    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    for(int i = 0; i < numAppLayers; i++) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        if(not isSupportedForMDPComp(ctx, layer)) {
            ALOGD_IF(isDebug(), "%s: Unsupported layer in list",__FUNCTION__);
            return false;
        }

        //For 8x26, if only one layer needs scaling (for the secondary display
        //while primary needs none), the DMA pipe is occupied by primary. If we
        //then fall back to GLES composition, the virtual display lacks a DMA
        //pipe and an error is reported.
        if(qdutils::MDPVersion::getInstance().is8x26() &&
                mDpy >= HWC_DISPLAY_EXTERNAL &&
                qhwc::needsScaling(layer))
            return false;
    }

    mCurrentFrame.fbCount = 0;
    mCurrentFrame.fbZ = -1;
    memcpy(&mCurrentFrame.isFBComposed, &mCurrentFrame.drop,
            sizeof(mCurrentFrame.isFBComposed));
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount -
            mCurrentFrame.dropCount;

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

bool MDPComp::partialMDPComp(hwc_context_t *ctx, hwc_display_contents_1_t* list)
{
    if(!sEnableMixedMode) {
        //Mixed mode is disabled. No need to even try caching.
        return false;
    }

    bool ret = false;
    if(list->flags & HWC_GEOMETRY_CHANGED) { //Try load based first
        ret = loadBasedCompPreferGPU(ctx, list) or
                loadBasedCompPreferMDP(ctx, list) or
                cacheBasedComp(ctx, list);
    } else {
        ret = cacheBasedComp(ctx, list) or
                loadBasedCompPreferGPU(ctx, list) or
                loadBasedCompPreferMDP(ctx, list);
    }

    return ret;
}

bool MDPComp::cacheBasedComp(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numAppLayers);
    updateLayerCache(ctx, list);

    //If an MDP marked layer is unsupported cannot do partial MDP Comp
    for(int i = 0; i < numAppLayers; i++) {
        if(!mCurrentFrame.isFBComposed[i]) {
            hwc_layer_1_t* layer = &list->hwLayers[i];
            if(not isSupportedForMDPComp(ctx, layer)) {
                ALOGD_IF(isDebug(), "%s: Unsupported layer in list",
                        __FUNCTION__);
                reset(ctx);
                return false;
            }
        }
    }

    updateYUV(ctx, list, false /*secure only*/);
    bool ret = markLayersForCaching(ctx, list); //sets up fbZ also
    if(!ret) {
        ALOGD_IF(isDebug(),"%s: batching failed, dpy %d",__FUNCTION__, mDpy);
        reset(ctx);
        return false;
    }

    int mdpCount = mCurrentFrame.mdpCount;

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    //Will benefit cases where a video has non-updating background.
    if((mDpy > HWC_DISPLAY_PRIMARY) and
            (mdpCount > MAX_SEC_LAYERS)) {
        ALOGD_IF(isDebug(), "%s: Exceeds max secondary pipes",__FUNCTION__);
        reset(ctx);
        return false;
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

bool MDPComp::loadBasedCompPreferGPU(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    if(not isLoadBasedCompDoable(ctx, list)) {
        return false;
    }

    int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numAppLayers);

    int stagesForMDP = min(sMaxPipesPerMixer, ctx->mOverlay->availablePipes(
            mDpy, Overlay::MIXER_DEFAULT));
    //If MDP has X possible stages, it can take X layers.
    const int batchSize = numAppLayers - (stagesForMDP - 1); //1 for FB

    if(batchSize <= 0) {
        ALOGD_IF(isDebug(), "%s: Not attempting", __FUNCTION__);
        return false;
    }

    int minBatchStart = -1;
    size_t minBatchPixelCount = SIZE_MAX;

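    // Slide a window of batchSize consecutive layers across the list and pick
    // the one with the fewest source pixels to leave on the GPU, so GLES does
    // the least work while MDP takes the rest.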
    for(int i = 0; i <= numAppLayers - batchSize; i++) {
        uint32_t batchPixelCount = 0;
        for(int j = i; j < i + batchSize; j++) {
            hwc_layer_1_t* layer = &list->hwLayers[j];
            hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
            batchPixelCount += (crop.right - crop.left) *
                    (crop.bottom - crop.top);
        }

        if(batchPixelCount < minBatchPixelCount) {
            minBatchPixelCount = batchPixelCount;
            minBatchStart = i;
        }
    }

    if(minBatchStart < 0) {
        ALOGD_IF(isDebug(), "%s: No batch found batchSize %d numAppLayers %d",
                __FUNCTION__, batchSize, numAppLayers);
        return false;
    }

    for(int i = 0; i < numAppLayers; i++) {
        if(i < minBatchStart || i >= minBatchStart + batchSize) {
            hwc_layer_1_t* layer = &list->hwLayers[i];
            if(not isSupportedForMDPComp(ctx, layer)) {
                ALOGD_IF(isDebug(), "%s: MDP unsupported layer found at %d",
                        __FUNCTION__, i);
                reset(ctx);
                return false;
            }
            mCurrentFrame.isFBComposed[i] = false;
        }
    }

    mCurrentFrame.fbZ = minBatchStart;
    mCurrentFrame.fbCount = batchSize;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - batchSize;

    ALOGD_IF(isDebug(), "%s: fbZ %d batchSize %d",
            __FUNCTION__, mCurrentFrame.fbZ, batchSize);

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

bool MDPComp::loadBasedCompPreferMDP(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    if(not isLoadBasedCompDoable(ctx, list)) {
        return false;
    }

    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numAppLayers);

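    // Estimate how many "full screen" layers the leftover bus bandwidth can
    // feed at the panel refresh rate (xres * yres * 4 Bpp * fps); whatever
    // exceeds that budget becomes the top-most FB (GLES) batch.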
    //Full screen is from ib perspective, not actual full screen
    const int bpp = 4;
    double panelRefRate =
                1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;

    double bwLeft = sMaxBw - sBwClaimed;

    const int fullScreenLayers = bwLeft * 1000000000 / (ctx->dpyAttr[mDpy].xres
            * ctx->dpyAttr[mDpy].yres * bpp * panelRefRate);

    const int fbBatchSize = numAppLayers - (fullScreenLayers - 1);
    //If batch size is not at least 2, we aren't really preferring MDP, since
    //only 1 layer going to GPU could actually translate into an entire FB
    //needed to be fetched by MDP, thus needing more b/w rather than less.
    if(fbBatchSize < 2 || fbBatchSize > numAppLayers) {
        ALOGD_IF(isDebug(), "%s: Not attempting", __FUNCTION__);
        return false;
    }

    //Top-most layers constitute FB batch
    const int fbBatchStart = numAppLayers - fbBatchSize;

    //Bottom-most layers constitute MDP batch
    for(int i = 0; i < fbBatchStart; i++) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        if(not isSupportedForMDPComp(ctx, layer)) {
            ALOGD_IF(isDebug(), "%s: MDP unsupported layer found at %d",
                    __FUNCTION__, i);
            reset(ctx);
            return false;
        }
        mCurrentFrame.isFBComposed[i] = false;
    }

    mCurrentFrame.fbZ = fbBatchStart;
    mCurrentFrame.fbCount = fbBatchSize;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - fbBatchSize;

    ALOGD_IF(isDebug(), "%s: FB Z %d, num app layers %d, MDP Batch Size %d",
            __FUNCTION__, mCurrentFrame.fbZ, numAppLayers,
            numAppLayers - fbBatchSize);

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

bool MDPComp::isLoadBasedCompDoable(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    if(mDpy or isSecurePresent(ctx, mDpy) or
            isYuvPresent(ctx, mDpy)) {
        return false;
    }
    return true;
}

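// Fallback strategy: compose only the video (YUV) layers through MDP, first
// considering all of them, then only the secure ones.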
bool MDPComp::tryVideoOnly(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    const bool secureOnly = true;
    return videoOnlyComp(ctx, list, not secureOnly) or
            videoOnlyComp(ctx, list, secureOnly);
}

bool MDPComp::videoOnlyComp(hwc_context_t *ctx,
        hwc_display_contents_1_t* list, bool secureOnly) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    mCurrentFrame.reset(numAppLayers);
    updateYUV(ctx, list, secureOnly);
    int mdpCount = mCurrentFrame.mdpCount;

    if(!isYuvPresent(ctx, mDpy) or (mdpCount == 0)) {
        reset(ctx);
        return false;
    }

    /* Bail out if we are processing only secured video layers
     * and we don't have any */
    if(!isSecurePresent(ctx, mDpy) && secureOnly){
        reset(ctx);
        return false;
    }

    if(mCurrentFrame.fbCount)
        mCurrentFrame.fbZ = mCurrentFrame.mdpCount;

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

/* Checks for conditions where YUV layers cannot be bypassed */
bool MDPComp::isYUVDoable(hwc_context_t* ctx, hwc_layer_1_t* layer) {
    if(isSkipLayer(layer)) {
        ALOGD_IF(isDebug(), "%s: Video marked SKIP dpy %d", __FUNCTION__, mDpy);
        return false;
    }

    if(layer->transform & HWC_TRANSFORM_ROT_90 && !canUseRotator(ctx,mDpy)) {
        ALOGD_IF(isDebug(), "%s: no free DMA pipe",__FUNCTION__);
        return false;
    }

    if(isSecuring(ctx, layer)) {
        ALOGD_IF(isDebug(), "%s: MDP securing is active", __FUNCTION__);
        return false;
    }

    if(!isValidDimension(ctx, layer)) {
        ALOGD_IF(isDebug(), "%s: Buffer is of invalid width",
            __FUNCTION__);
        return false;
    }

    if(layer->planeAlpha < 0xFF) {
        ALOGD_IF(isDebug(), "%s: Cannot handle YUV layer with plane alpha\
                 in video only mode",
                 __FUNCTION__);
        return false;
    }

    return true;
}

/* Starts at fromIndex and checks each FB-cached layer for overlap with any
 * updating layer above it in Z-order, up to toIndex. Returns false if any
 * such intersection is found (the batch cannot be pushed up). */
bool MDPComp::canPushBatchToTop(const hwc_display_contents_1_t* list,
        int fromIndex, int toIndex) {
    for(int i = fromIndex; i < toIndex; i++) {
        if(mCurrentFrame.isFBComposed[i] && !mCurrentFrame.drop[i]) {
            if(intersectingUpdatingLayers(list, i+1, toIndex, i)) {
                return false;
            }
        }
    }
    return true;
}

/* Checks if the layer at targetLayerIndex intersects any of the updating
 * layers between fromIndex and toIndex. Returns true if an intersection
 * is found. */
bool MDPComp::intersectingUpdatingLayers(const hwc_display_contents_1_t* list,
        int fromIndex, int toIndex, int targetLayerIndex) {
    for(int i = fromIndex; i <= toIndex; i++) {
        if(!mCurrentFrame.isFBComposed[i]) {
            if(areLayersIntersecting(&list->hwLayers[i],
                        &list->hwLayers[targetLayerIndex])) {
                return true;
            }
        }
    }
    return false;
}

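// Finds the largest contiguous batch of cacheable (FB-composed) layers,
// pushing the batch up in Z where the intervening updating layers do not
// overlap it. Returns the Z-order at which the FB target should be placed.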
int MDPComp::getBatch(hwc_display_contents_1_t* list,
        int& maxBatchStart, int& maxBatchEnd,
        int& maxBatchCount) {
    int i = 0;
    int fbZOrder = -1;
    while (i < mCurrentFrame.layerCount) {
        int batchCount = 0;
        int batchStart = i;
        int batchEnd = i;
        int fbZ = batchStart;
        int firstZReverseIndex = -1;
        int updatingLayersAbove = 0;//Updating layer count in middle of batch
        while(i < mCurrentFrame.layerCount) {
            if(!mCurrentFrame.isFBComposed[i]) {
                if(!batchCount) {
                    i++;
                    break;
                }
                updatingLayersAbove++;
                i++;
                continue;
            } else {
                if(mCurrentFrame.drop[i]) {
                    i++;
                    continue;
                } else if(updatingLayersAbove <= 0) {
                    batchCount++;
                    batchEnd = i;
                    i++;
                    continue;
                } else { //Layer is FBComposed, not a drop & updatingLayer > 0

                    // We already have a valid updating layer. If layer-i does
                    // not overlap any updating layer between batch-start and
                    // i, then we can add layer i to the batch.
                    if(!intersectingUpdatingLayers(list, batchStart, i-1, i)) {
                        batchCount++;
                        batchEnd = i;
                        i++;
                        continue;
                    } else if(canPushBatchToTop(list, batchStart, i)) {
                        //If none of the non-updating layers within this batch
                        //intersects the updating layers above in z-order, we
                        //can safely move the batch to a higher z-order.
                        //Increment fbZ as it is moving up.
                        if( firstZReverseIndex < 0) {
                            firstZReverseIndex = i;
                        }
                        batchCount++;
                        batchEnd = i;
                        fbZ += updatingLayersAbove;
                        i++;
                        updatingLayersAbove = 0;
                        continue;
                    } else {
                        //Both checks failed. Restart the loop from here.
                        if(firstZReverseIndex >= 0) {
                            i = firstZReverseIndex;
                        }
                        break;
                    }
                }
            }
        }
        if(batchCount > maxBatchCount) {
            maxBatchCount = batchCount;
            maxBatchStart = batchStart;
            maxBatchEnd = batchEnd;
            fbZOrder = fbZ;
        }
    }
    return fbZOrder;
}

bool MDPComp::markLayersForCaching(hwc_context_t* ctx,
        hwc_display_contents_1_t* list) {
    /* Idea is to keep as many non-updating(cached) layers in FB and
     * send the rest of them through MDP. This is done in 2 steps.
     *   1. Find the maximum contiguous batch of non-updating layers.
     *   2. See if we can improve this batch size for caching by adding
     *      opaque layers around the batch, if they don't overlap the
     *      updating layers in between.
     * NEVER mark an updating layer for caching.
     * But cached ones can be marked for MDP */

    int maxBatchStart = -1;
    int maxBatchEnd = -1;
    int maxBatchCount = 0;
    int fbZ = -1;

    /* All or Nothing is cached. No batching needed */
    if(!mCurrentFrame.fbCount) {
        mCurrentFrame.fbZ = -1;
        return true;
    }
    if(!mCurrentFrame.mdpCount) {
        mCurrentFrame.fbZ = 0;
        return true;
    }

    fbZ = getBatch(list, maxBatchStart, maxBatchEnd, maxBatchCount);

    /* reset rest of the layers lying inside ROI for MDP comp */
    for(int i = 0; i < mCurrentFrame.layerCount; i++) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        if((i < maxBatchStart || i > maxBatchEnd) &&
                mCurrentFrame.isFBComposed[i]){
            if(!mCurrentFrame.drop[i]){
                //If an unsupported layer is being attempted to
                //be pulled out we should fail
                if(not isSupportedForMDPComp(ctx, layer)) {
                    return false;
                }
                mCurrentFrame.isFBComposed[i] = false;
            }
        }
    }

    // update the frame data
    mCurrentFrame.fbZ = fbZ;
    mCurrentFrame.fbCount = maxBatchCount;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount -
            mCurrentFrame.fbCount - mCurrentFrame.dropCount;

    ALOGD_IF(isDebug(),"%s: cached count: %d",__FUNCTION__,
            mCurrentFrame.fbCount);

    return true;
}

void MDPComp::updateLayerCache(hwc_context_t* ctx,
        hwc_display_contents_1_t* list) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    int fbCount = 0;

    for(int i = 0; i < numAppLayers; i++) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        if (mCachedFrame.hnd[i] == list->hwLayers[i].handle) {
            if(!mCurrentFrame.drop[i])
                fbCount++;
            mCurrentFrame.isFBComposed[i] = true;
        } else {
            mCurrentFrame.isFBComposed[i] = false;
        }
    }

    mCurrentFrame.fbCount = fbCount;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount
                                                    - mCurrentFrame.dropCount;

    ALOGD_IF(isDebug(),"%s: MDP count: %d FB count %d drop count: %d"
            ,__FUNCTION__, mCurrentFrame.mdpCount, mCurrentFrame.fbCount,
            mCurrentFrame.dropCount);
}

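// Marks YUV layers for MDP composition (restricted to secure buffers when
// secureOnly is set); YUV layers that fail isYUVDoable stay on the FB.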
void MDPComp::updateYUV(hwc_context_t* ctx, hwc_display_contents_1_t* list,
        bool secureOnly) {
    int nYuvCount = ctx->listStats[mDpy].yuvCount;
    for(int index = 0;index < nYuvCount; index++){
        int nYuvIndex = ctx->listStats[mDpy].yuvIndices[index];
        hwc_layer_1_t* layer = &list->hwLayers[nYuvIndex];

        if(!isYUVDoable(ctx, layer)) {
            if(!mCurrentFrame.isFBComposed[nYuvIndex]) {
                mCurrentFrame.isFBComposed[nYuvIndex] = true;
                mCurrentFrame.fbCount++;
            }
        } else {
            if(mCurrentFrame.isFBComposed[nYuvIndex]) {
                private_handle_t *hnd = (private_handle_t *)layer->handle;
                if(!secureOnly || isSecureBuffer(hnd)) {
                    mCurrentFrame.isFBComposed[nYuvIndex] = false;
                    mCurrentFrame.fbCount--;
                }
            }
        }
    }

    mCurrentFrame.mdpCount = mCurrentFrame.layerCount -
            mCurrentFrame.fbCount - mCurrentFrame.dropCount;
    ALOGD_IF(isDebug(),"%s: fb count: %d",__FUNCTION__,
             mCurrentFrame.fbCount);
}

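// Common tail for every strategy: run bandwidth/pipe and HW-limitation
// checks, configure the FB target (if any), allocate MDP pipes, assign
// Z-orders, and configure each MDP-composed layer.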
bool MDPComp::postHeuristicsHandling(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {

    //Capability checks
    if(!resourceCheck(ctx, list)) {
        ALOGD_IF(isDebug(), "%s: resource check failed", __FUNCTION__);
        return false;
    }

    //Limitations checks
    if(!hwLimitationsCheck(ctx, list)) {
        ALOGD_IF(isDebug(), "%s: HW limitations",__FUNCTION__);
        return false;
    }

    //Configure framebuffer first if applicable
    if(mCurrentFrame.fbZ >= 0) {
        if(!ctx->mFBUpdate[mDpy]->prepare(ctx, list, mCurrentFrame.fbZ)) {
            ALOGD_IF(isDebug(), "%s configure framebuffer failed",
                    __FUNCTION__);
            return false;
        }
    }

    mCurrentFrame.map();

    if(!allocLayerPipes(ctx, list)) {
        ALOGD_IF(isDebug(), "%s: Unable to allocate MDP pipes", __FUNCTION__);
        return false;
    }

    for (int index = 0, mdpNextZOrder = 0; index < mCurrentFrame.layerCount;
            index++) {
        if(!mCurrentFrame.isFBComposed[index]) {
            int mdpIndex = mCurrentFrame.layerToMDP[index];
            hwc_layer_1_t* layer = &list->hwLayers[index];

            //Leave fbZ for framebuffer. CACHE/GLES layers go here.
            if(mdpNextZOrder == mCurrentFrame.fbZ) {
                mdpNextZOrder++;
            }
            MdpPipeInfo* cur_pipe = mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
            cur_pipe->zOrder = mdpNextZOrder++;

            private_handle_t *hnd = (private_handle_t *)layer->handle;
            if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
                if(configure4k2kYuv(ctx, layer,
                        mCurrentFrame.mdpToLayer[mdpIndex])
                        != 0 ){
                    ALOGD_IF(isDebug(), "%s: Failed to configure split pipes \
                            for layer %d",__FUNCTION__, index);
                    return false;
                }
                else{
                    mdpNextZOrder++;
                }
                continue;
            }
            if(configure(ctx, layer, mCurrentFrame.mdpToLayer[mdpIndex]) != 0 ){
                ALOGD_IF(isDebug(), "%s: Failed to configure overlay for \
                        layer %d",__FUNCTION__, index);
                return false;
            }
        }
    }

    setRedraw(ctx, list);
    return true;
}

bool MDPComp::resourceCheck(hwc_context_t *ctx,
        hwc_display_contents_1_t *list) {
    const bool fbUsed = mCurrentFrame.fbCount;
    if(mCurrentFrame.mdpCount > sMaxPipesPerMixer - fbUsed) {
        ALOGD_IF(isDebug(), "%s: Exceeds MAX_PIPES_PER_MIXER",__FUNCTION__);
        return false;
    }

    if(!arePipesAvailable(ctx, list)) {
        return false;
    }

    double size = calcMDPBytesRead(ctx, list);
    if(!bandwidthCheck(ctx, size)) {
        ALOGD_IF(isDebug(), "%s: Exceeds bandwidth",__FUNCTION__);
        return false;
    }

    return true;
}

double MDPComp::calcMDPBytesRead(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    double size = 0;
    const double GIG = 1000000000.0;

    //Skip for targets where no device tree value for bw is supplied
    if(sMaxBw <= 0.0) {
        return 0.0;
    }

Saurabh Shah8c5c8522013-08-29 17:32:49 -07001245 for (uint32_t i = 0; i < list->numHwLayers - 1; i++) {
1246 if(!mCurrentFrame.isFBComposed[i]) {
1247 hwc_layer_1_t* layer = &list->hwLayers[i];
1248 private_handle_t *hnd = (private_handle_t *)layer->handle;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001249 if (hnd) {
Saurabh Shah62e1d732013-09-17 10:44:05 -07001250 hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
Saurabh Shah90789162013-09-16 10:29:20 -07001251 hwc_rect_t dst = layer->displayFrame;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001252 float bpp = ((float)hnd->size) / (hnd->width * hnd->height);
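                //Bytes fetched for this layer's crop per frame, scaled by
                //yres / dst height: a layer that occupies fewer output lines
                //must be fetched in proportionally less scanout time, so its
                //effective rate is higher (result normalized to GB).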
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001253 size += (bpp * (crop.right - crop.left) *
1254 (crop.bottom - crop.top) *
1255 ctx->dpyAttr[mDpy].yres / (dst.bottom - dst.top)) /
1256 GIG;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001257 }
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001258 }
1259 }
1260
1261 if(mCurrentFrame.fbCount) {
1262 hwc_layer_1_t* layer = &list->hwLayers[list->numHwLayers - 1];
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001263 int tempw, temph;
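        //The FB target is charged as one RGBA8888 surface of its displayFrame
        //size per refresh (tempw/temph receive the aligned dimensions).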
1264 size += (getBufferSizeAndDimensions(
1265 layer->displayFrame.right - layer->displayFrame.left,
1266 layer->displayFrame.bottom - layer->displayFrame.top,
1267 HAL_PIXEL_FORMAT_RGBA_8888,
1268 tempw, temph)) / GIG;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001269 }
1270
1271 return size;
1272}
1273
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001274bool MDPComp::bandwidthCheck(hwc_context_t *ctx, const double& size) {
1275 //Skip for targets where no device tree value for bw is supplied
1276 if(sMaxBw <= 0.0) {
1277 return true;
1278 }
1279
1280 double panelRefRate =
1281 1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;
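    //size is GB per frame and panelRefRate is frames per second (vsync_period
    //is in ns), so their product is demand in GB/s. Illustrative: a full-screen
    //1080p RGBA layer is ~8.3MB per frame, i.e. ~0.5 GB/s at 60Hz.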
1282 if((size * panelRefRate) > (sMaxBw - sBwClaimed)) {
1283 return false;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001284 }
1285 return true;
1286}
1287
Prabhanjan Kandula21918db2013-11-26 15:51:58 +05301288bool MDPComp::hwLimitationsCheck(hwc_context_t* ctx,
1289 hwc_display_contents_1_t* list) {
1290
1291 //A-family hw limitation:
1292    //If a layer needs alpha scaling, MDP cannot support it.
1293 if(ctx->mMDP.version < qdutils::MDSS_V5) {
1294 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1295 if(!mCurrentFrame.isFBComposed[i] &&
1296 isAlphaScaled( &list->hwLayers[i])) {
1297                ALOGD_IF(isDebug(), "%s: frame needs alphaScaling",__FUNCTION__);
1298 return false;
1299 }
1300 }
1301 }
1302
1303    // On 8x26 & 8974 hw, downscaling combined with blending is limited:
1304    //if multiple layers require downscaling and they overlap,
1305    //fall back to GPU since MDSS cannot handle it.
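    //e.g. two downscaled MDP layers stacked over the same region would hit
    //this; non-overlapping downscaled layers remain eligible for MDP.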
1306 if(qdutils::MDPVersion::getInstance().is8x74v2() ||
1307 qdutils::MDPVersion::getInstance().is8x26()) {
1308 for(int i = 0; i < mCurrentFrame.layerCount-1; ++i) {
1309 hwc_layer_1_t* botLayer = &list->hwLayers[i];
1310 if(!mCurrentFrame.isFBComposed[i] &&
1311 isDownscaleRequired(botLayer)) {
1312 //if layer-i is marked for MDP and needs downscaling
1313 //check if any MDP layer on top of i & overlaps with layer-i
1314 for(int j = i+1; j < mCurrentFrame.layerCount; ++j) {
1315 hwc_layer_1_t* topLayer = &list->hwLayers[j];
1316 if(!mCurrentFrame.isFBComposed[j] &&
1317 isDownscaleRequired(topLayer)) {
1318 hwc_rect_t r = getIntersection(botLayer->displayFrame,
1319 topLayer->displayFrame);
1320 if(isValidRect(r))
1321 return false;
1322 }
1323 }
1324 }
1325 }
1326 }
1327 return true;
1328}
1329
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001330int MDPComp::prepare(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001331 int ret = 0;
Saurabh Shahaa236822013-04-24 18:07:26 -07001332 const int numLayers = ctx->listStats[mDpy].numAppLayers;
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001333 MDPVersion& mdpVersion = qdutils::MDPVersion::getInstance();
Ramkumar Radhakrishnanc5893f12013-06-06 19:43:53 -07001334
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001335    //If the number of app layers exceeds MAX_NUM_APP_LAYERS, fall back to GPU
1336    //and do not cache the information for the next draw cycle.
1337 if(numLayers > MAX_NUM_APP_LAYERS) {
1338 ALOGI("%s: Number of App layers exceeded the limit ",
1339 __FUNCTION__);
1340 mCachedFrame.reset();
1341 return -1;
1342 }
1343
Saurabh Shahb39f8152013-08-22 10:21:44 -07001344 //reset old data
1345 mCurrentFrame.reset(numLayers);
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001346 memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
1347 mCurrentFrame.dropCount = 0;
Prabhanjan Kandula088bd892013-07-02 23:47:13 +05301348
Ramkumar Radhakrishnana70981a2013-08-28 11:33:53 -07001349 // Detect the start of animation and fall back to GPU only once to cache
1350    // all the layers in FB and display FB content until animation completes.
1351 if(ctx->listStats[mDpy].isDisplayAnimating) {
1352 mCurrentFrame.needsRedraw = false;
1353 if(ctx->mAnimationState[mDpy] == ANIMATION_STOPPED) {
1354 mCurrentFrame.needsRedraw = true;
1355 ctx->mAnimationState[mDpy] = ANIMATION_STARTED;
1356 }
1357 setMDPCompLayerFlags(ctx, list);
1358 mCachedFrame.updateCounts(mCurrentFrame);
1359 ret = -1;
1360 return ret;
1361 } else {
1362 ctx->mAnimationState[mDpy] = ANIMATION_STOPPED;
1363 }
1364
Saurabh Shahb39f8152013-08-22 10:21:44 -07001365 //Hard conditions, if not met, cannot do MDP comp
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001366 if(isFrameDoable(ctx)) {
1367 generateROI(ctx, list);
Saurabh Shahb39f8152013-08-22 10:21:44 -07001368
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001369        //Convert from KBps to GBps
1370 sMaxBw = mdpVersion.getHighBw() / 1000000.0;
1371 if (ctx->mExtDisplay->isConnected() ||
1372 ctx->mMDP.panel != MIPI_CMD_PANEL) {
1373 sMaxBw = mdpVersion.getLowBw() / 1000000.0;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001374 }
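        //Presumably the lower cap applies because an external display or a
        //video-mode (non command) panel keeps MDP fetching continuously; the
        //actual limits come from the device tree values read above.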
1375
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001376 if(tryFullFrame(ctx, list) || tryVideoOnly(ctx, list)) {
1377 setMDPCompLayerFlags(ctx, list);
1378 } else {
1379 reset(ctx);
1380 memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
1381 mCurrentFrame.dropCount = 0;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001382 ret = -1;
Saurabh Shahb39f8152013-08-22 10:21:44 -07001383 }
1384 } else {
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001385 ALOGD_IF( isDebug(),"%s: MDP Comp not possible for this frame",
1386 __FUNCTION__);
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001387 ret = -1;
Saurabh Shahb39f8152013-08-22 10:21:44 -07001388 }
Saurabh Shahb39f8152013-08-22 10:21:44 -07001389
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001390 if(isDebug()) {
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001391 ALOGD("GEOMETRY change: %d",
1392 (list->flags & HWC_GEOMETRY_CHANGED));
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001393 android::String8 sDump("");
1394 dump(sDump);
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001395 ALOGD("%s",sDump.string());
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001396 }
1397
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001398 mCachedFrame.cacheAll(list);
1399 mCachedFrame.updateCounts(mCurrentFrame);
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001400 double panelRefRate =
1401 1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;
1402 sBwClaimed += calcMDPBytesRead(ctx, list) * panelRefRate;
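    //Claim this display's bandwidth so displays prepared later in the same
    //cycle see a smaller remaining budget; sBwClaimed is presumably reset
    //once per composition cycle elsewhere.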
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001403 return ret;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001404}
1405
radhakrishnac9a67412013-09-25 17:40:42 +05301406bool MDPComp::allocSplitVGPipesfor4k2k(hwc_context_t *ctx,
1407 hwc_display_contents_1_t* list, int index) {
1408
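    //Allocate two VG pipes on the default mixer so the 4k2k YUV layer can be
    //source-split into left/right halves, each half presumably staying within
    //a single pipe's source-width limit.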
1409 bool bRet = true;
1410 hwc_layer_1_t* layer = &list->hwLayers[index];
1411 private_handle_t *hnd = (private_handle_t *)layer->handle;
1412 int mdpIndex = mCurrentFrame.layerToMDP[index];
1413 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
1414 info.pipeInfo = new MdpYUVPipeInfo;
1415 info.rot = NULL;
1416 MdpYUVPipeInfo& pipe_info = *(MdpYUVPipeInfo*)info.pipeInfo;
1417 ePipeType type = MDPCOMP_OV_VG;
1418
1419 pipe_info.lIndex = ovutils::OV_INVALID;
1420 pipe_info.rIndex = ovutils::OV_INVALID;
1421
1422 pipe_info.lIndex = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
1423 if(pipe_info.lIndex == ovutils::OV_INVALID){
1424 bRet = false;
1425 ALOGD_IF(isDebug(),"%s: allocating first VG pipe failed",
1426 __FUNCTION__);
1427 }
1428 pipe_info.rIndex = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
1429 if(pipe_info.rIndex == ovutils::OV_INVALID){
1430 bRet = false;
1431 ALOGD_IF(isDebug(),"%s: allocating second VG pipe failed",
1432 __FUNCTION__);
1433 }
1434 return bRet;
1435}
Saurabh Shah88e4d272013-09-03 13:31:29 -07001436//=============MDPCompNonSplit===================================================
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001437
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001438void MDPCompNonSplit::adjustForSourceSplit(hwc_context_t *ctx,
radhakrishnac9a67412013-09-25 17:40:42 +05301439 hwc_display_contents_1_t* list){
1440 //As we split 4kx2k yuv layer and program to 2 VG pipes
1441 //(if available) increase mdpcount accordingly
1442 mCurrentFrame.mdpCount += ctx->listStats[mDpy].yuv4k2kCount;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001443
1444    //If a 4k2k Yuv layer split is possible and fbZ sits above the
1445    //4k2k layer, increment the fb zorder by 1, since the split
1446    //produces a right half that takes an extra zorder slot in the
1447    //final stack.
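    //e.g. a 4k2k layer at index 0 with fbZ == 1 pushes fbZ to 2; the layer's
    //right half then occupies the intermediate z slot.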
1448 if(mCurrentFrame.fbZ >= 0) {
1449 int n4k2kYuvCount = ctx->listStats[mDpy].yuv4k2kCount;
1450 for(int index = 0; index < n4k2kYuvCount; index++){
1451 int n4k2kYuvIndex =
1452 ctx->listStats[mDpy].yuv4k2kIndices[index];
1453 if(mCurrentFrame.fbZ > n4k2kYuvIndex){
1454 mCurrentFrame.fbZ += 1;
1455 }
1456 }
1457 }
radhakrishnac9a67412013-09-25 17:40:42 +05301458}
1459
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001460/*
1461 * Configures pipe(s) for MDP composition
1462 */
Saurabh Shah88e4d272013-09-03 13:31:29 -07001463int MDPCompNonSplit::configure(hwc_context_t *ctx, hwc_layer_1_t *layer,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001464 PipeLayerPair& PipeLayerPair) {
Saurabh Shah88e4d272013-09-03 13:31:29 -07001465 MdpPipeInfoNonSplit& mdp_info =
1466 *(static_cast<MdpPipeInfoNonSplit*>(PipeLayerPair.pipeInfo));
Saurabh Shahacf10202013-02-26 10:15:15 -08001467 eMdpFlags mdpFlags = OV_MDP_BACKEND_COMPOSITION;
1468 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1469 eIsFg isFg = IS_FG_OFF;
1470 eDest dest = mdp_info.index;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001471
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001472 ALOGD_IF(isDebug(),"%s: configuring: layer: %p z_order: %d dest_pipe: %d",
1473 __FUNCTION__, layer, zOrder, dest);
1474
Saurabh Shah88e4d272013-09-03 13:31:29 -07001475 return configureNonSplit(ctx, layer, mDpy, mdpFlags, zOrder, isFg, dest,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001476 &PipeLayerPair.rot);
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001477}
1478
Saurabh Shah88e4d272013-09-03 13:31:29 -07001479bool MDPCompNonSplit::arePipesAvailable(hwc_context_t *ctx,
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001480 hwc_display_contents_1_t* list) {
1481 overlay::Overlay& ov = *ctx->mOverlay;
1482 int numPipesNeeded = mCurrentFrame.mdpCount;
1483 int availPipes = ov.availablePipes(mDpy, Overlay::MIXER_DEFAULT);
1484
1485 //Reserve pipe for FB
1486 if(mCurrentFrame.fbCount)
1487 availPipes -= 1;
1488
1489 if(numPipesNeeded > availPipes) {
1490 ALOGD_IF(isDebug(), "%s: Insufficient pipes, dpy %d needed %d, avail %d",
1491 __FUNCTION__, mDpy, numPipesNeeded, availPipes);
1492 return false;
1493 }
1494
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001495 if(not areVGPipesAvailable(ctx, list)) {
1496 return false;
1497 }
1498
1499 return true;
1500}
1501
1502bool MDPCompNonSplit::areVGPipesAvailable(hwc_context_t *ctx,
1503 hwc_display_contents_1_t* list) {
1504 overlay::Overlay& ov = *ctx->mOverlay;
1505 int pipesNeeded = 0;
1506 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1507 if(!mCurrentFrame.isFBComposed[i]) {
1508 hwc_layer_1_t* layer = &list->hwLayers[i];
1509 hwc_rect_t dst = layer->displayFrame;
1510 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301511 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1512 pipesNeeded = pipesNeeded + 2;
1513 }
1514 else if(isYuvBuffer(hnd)) {
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001515 pipesNeeded++;
1516 }
1517 }
1518 }
1519
1520 int availableVGPipes = ov.availablePipes(mDpy, ovutils::OV_MDP_PIPE_VG);
1521 if(pipesNeeded > availableVGPipes) {
1522        ALOGD_IF(isDebug(), "%s: Insufficient VG pipes for video layers "
1523 "dpy %d needed %d, avail %d",
1524 __FUNCTION__, mDpy, pipesNeeded, availableVGPipes);
1525 return false;
1526 }
1527
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001528 return true;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001529}
1530
Saurabh Shah88e4d272013-09-03 13:31:29 -07001531bool MDPCompNonSplit::allocLayerPipes(hwc_context_t *ctx,
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001532 hwc_display_contents_1_t* list) {
1533 for(int index = 0; index < mCurrentFrame.layerCount; index++) {
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001534
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001535 if(mCurrentFrame.isFBComposed[index]) continue;
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001536
Jeykumar Sankarancf537002013-01-21 21:19:15 -08001537 hwc_layer_1_t* layer = &list->hwLayers[index];
1538 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301539 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1540 if(allocSplitVGPipesfor4k2k(ctx, list, index)){
1541 continue;
1542 }
1543 }
1544
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001545 int mdpIndex = mCurrentFrame.layerToMDP[index];
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001546 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
Saurabh Shah88e4d272013-09-03 13:31:29 -07001547 info.pipeInfo = new MdpPipeInfoNonSplit;
Saurabh Shahacf10202013-02-26 10:15:15 -08001548 info.rot = NULL;
Saurabh Shah88e4d272013-09-03 13:31:29 -07001549 MdpPipeInfoNonSplit& pipe_info = *(MdpPipeInfoNonSplit*)info.pipeInfo;
Jeykumar Sankarana37fdbf2013-03-06 18:59:28 -08001550 ePipeType type = MDPCOMP_OV_ANY;
1551
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001552 if(isYuvBuffer(hnd)) {
1553 type = MDPCOMP_OV_VG;
Prabhanjan Kandula21918db2013-11-26 15:51:58 +05301554 } else if(!qhwc::needsScaling(layer)
Saurabh Shah85234ec2013-04-12 17:09:00 -07001555 && Overlay::getDMAMode() != Overlay::DMA_BLOCK_MODE
1556 && ctx->mMDP.version >= qdutils::MDSS_V5) {
Jeykumar Sankarana37fdbf2013-03-06 18:59:28 -08001557 type = MDPCOMP_OV_DMA;
1558 }
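        //YUV content needs a VG pipe (CSC/scaling); unscaled RGB on MDSS can
        //use a DMA pipe (no scaler), presumably to keep scaler-capable pipes
        //free; otherwise any pipe type is acceptable.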
1559
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001560 pipe_info.index = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001561 if(pipe_info.index == ovutils::OV_INVALID) {
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001562 ALOGD_IF(isDebug(), "%s: Unable to get pipe type = %d",
1563 __FUNCTION__, (int) type);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001564 return false;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001565 }
1566 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001567 return true;
1568}
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001569
radhakrishnac9a67412013-09-25 17:40:42 +05301570int MDPCompNonSplit::configure4k2kYuv(hwc_context_t *ctx, hwc_layer_1_t *layer,
1571 PipeLayerPair& PipeLayerPair) {
1572 MdpYUVPipeInfo& mdp_info =
1573 *(static_cast<MdpYUVPipeInfo*>(PipeLayerPair.pipeInfo));
1574 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1575 eIsFg isFg = IS_FG_OFF;
1576 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1577 eDest lDest = mdp_info.lIndex;
1578 eDest rDest = mdp_info.rIndex;
1579
1580 return configureSourceSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg,
1581 lDest, rDest, &PipeLayerPair.rot);
1582}
1583
Saurabh Shah88e4d272013-09-03 13:31:29 -07001584bool MDPCompNonSplit::draw(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001585
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001586 if(!isEnabled()) {
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001587 ALOGD_IF(isDebug(),"%s: MDP Comp not configured", __FUNCTION__);
1588 return true;
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -08001589 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001590
1591 if(!ctx || !list) {
1592        ALOGE("%s: invalid context or list",__FUNCTION__);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001593 return false;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001594 }
1595
Prabhanjan Kandula08222fc2013-07-10 17:20:59 +05301596 if(ctx->listStats[mDpy].numAppLayers > MAX_NUM_APP_LAYERS) {
1597 ALOGD_IF(isDebug(),"%s: Exceeding max layer count", __FUNCTION__);
1598 return true;
1599 }
1600
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001601 /* reset Invalidator */
Saurabh Shah2d998a92013-05-14 17:55:58 -07001602 if(idleInvalidator && !sIdleFallBack && mCurrentFrame.mdpCount)
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001603 idleInvalidator->markForSleep();
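    //Arm the idle timer: if no new frame arrives before it fires, the
    //invalidator sets sIdleFallBack and triggers a redraw so composition
    //falls back to GPU and MDP pipes can be relinquished.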
1604
1605 overlay::Overlay& ov = *ctx->mOverlay;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001606 LayerProp *layerProp = ctx->layerProp[mDpy];
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001607
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001608 int numHwLayers = ctx->listStats[mDpy].numAppLayers;
1609 for(int i = 0; i < numHwLayers && mCurrentFrame.mdpCount; i++ )
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001610 {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001611 if(mCurrentFrame.isFBComposed[i]) continue;
1612
Naseer Ahmed5b6708a2012-08-02 13:46:08 -07001613 hwc_layer_1_t *layer = &list->hwLayers[i];
Saurabh Shahacf10202013-02-26 10:15:15 -08001614 private_handle_t *hnd = (private_handle_t *)layer->handle;
1615 if(!hnd) {
Sushil Chauhan897a9c32013-07-18 11:09:55 -07001616 if (!(layer->flags & HWC_COLOR_FILL)) {
1617 ALOGE("%s handle null", __FUNCTION__);
1618 return false;
1619 }
1620 // No PLAY for Color layer
1621 layerProp[i].mFlags &= ~HWC_MDPCOMP;
1622 continue;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001623 }
1624
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001625 int mdpIndex = mCurrentFrame.layerToMDP[i];
1626
radhakrishnac9a67412013-09-25 17:40:42 +05301627 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit)
1628 {
1629 MdpYUVPipeInfo& pipe_info =
1630 *(MdpYUVPipeInfo*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
1631 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
1632 ovutils::eDest indexL = pipe_info.lIndex;
1633 ovutils::eDest indexR = pipe_info.rIndex;
1634 int fd = hnd->fd;
1635 uint32_t offset = hnd->offset;
1636 if(rot) {
1637 rot->queueBuffer(fd, offset);
1638 fd = rot->getDstMemId();
1639 offset = rot->getDstOffset();
1640 }
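            //A rotator session means the rotated output buffer replaces the
            //original fd/offset before being queued to the pipes below.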
1641 if(indexL != ovutils::OV_INVALID) {
1642 ovutils::eDest destL = (ovutils::eDest)indexL;
1643 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1644 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
1645 if (!ov.queueBuffer(fd, offset, destL)) {
1646 ALOGE("%s: queueBuffer failed for display:%d",
1647 __FUNCTION__, mDpy);
1648 return false;
1649 }
1650 }
1651
1652 if(indexR != ovutils::OV_INVALID) {
1653 ovutils::eDest destR = (ovutils::eDest)indexR;
1654 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1655 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
1656 if (!ov.queueBuffer(fd, offset, destR)) {
1657 ALOGE("%s: queueBuffer failed for display:%d",
1658 __FUNCTION__, mDpy);
1659 return false;
1660 }
1661 }
1662 }
1663 else{
1664 MdpPipeInfoNonSplit& pipe_info =
Saurabh Shah88e4d272013-09-03 13:31:29 -07001665 *(MdpPipeInfoNonSplit*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
radhakrishnac9a67412013-09-25 17:40:42 +05301666 ovutils::eDest dest = pipe_info.index;
1667 if(dest == ovutils::OV_INVALID) {
1668 ALOGE("%s: Invalid pipe index (%d)", __FUNCTION__, dest);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001669 return false;
radhakrishnac9a67412013-09-25 17:40:42 +05301670 }
Saurabh Shahacf10202013-02-26 10:15:15 -08001671
radhakrishnac9a67412013-09-25 17:40:42 +05301672 if(!(layerProp[i].mFlags & HWC_MDPCOMP)) {
1673 continue;
1674 }
1675
1676 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1677 using pipe: %d", __FUNCTION__, layer,
1678 hnd, dest );
1679
1680 int fd = hnd->fd;
1681 uint32_t offset = hnd->offset;
1682
1683 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
1684 if(rot) {
1685 if(!rot->queueBuffer(fd, offset))
1686 return false;
1687 fd = rot->getDstMemId();
1688 offset = rot->getDstOffset();
1689 }
1690
1691 if (!ov.queueBuffer(fd, offset, dest)) {
1692 ALOGE("%s: queueBuffer failed for display:%d ",
1693 __FUNCTION__, mDpy);
1694 return false;
1695 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001696 }
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001697
1698 layerProp[i].mFlags &= ~HWC_MDPCOMP;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001699 }
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001700 return true;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001701}
1702
Saurabh Shah88e4d272013-09-03 13:31:29 -07001703//=============MDPCompSplit===================================================
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001704
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001705void MDPCompSplit::adjustForSourceSplit(hwc_context_t *ctx,
radhakrishnac9a67412013-09-25 17:40:42 +05301706 hwc_display_contents_1_t* list){
1707    //If a 4kx2k yuv layer lies entirely within either the left half
1708    //or the right half, try splitting that layer to avoid decimation
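    //A layer that crosses the split line is handled by the normal split path
    //(one pipe per mixer), so only layers confined to one half add an extra
    //pipe here.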
1709 int n4k2kYuvCount = ctx->listStats[mDpy].yuv4k2kCount;
1710 const int lSplit = getLeftSplit(ctx, mDpy);
1711 for(int index = 0; index < n4k2kYuvCount; index++){
1712 int n4k2kYuvIndex = ctx->listStats[mDpy].yuv4k2kIndices[index];
1713 hwc_layer_1_t* layer = &list->hwLayers[n4k2kYuvIndex];
1714 hwc_rect_t dst = layer->displayFrame;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001715 if((dst.left > lSplit) || (dst.right < lSplit)) {
radhakrishnac9a67412013-09-25 17:40:42 +05301716 mCurrentFrame.mdpCount += 1;
1717 }
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001718 if(mCurrentFrame.fbZ > n4k2kYuvIndex){
1719 mCurrentFrame.fbZ += 1;
1720 }
radhakrishnac9a67412013-09-25 17:40:42 +05301721 }
1722}
1723
Saurabh Shah88e4d272013-09-03 13:31:29 -07001724int MDPCompSplit::pipesNeeded(hwc_context_t *ctx,
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001725 hwc_display_contents_1_t* list,
1726 int mixer) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001727 int pipesNeeded = 0;
Saurabh Shah67a38c32013-06-10 16:23:15 -07001728 const int xres = ctx->dpyAttr[mDpy].xres;
Saurabh Shah07a8ca82013-08-06 18:45:42 -07001729
1730 const int lSplit = getLeftSplit(ctx, mDpy);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001731
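    //A layer whose dest rect crosses lSplit needs a pipe on each mixer, so it
    //is counted for both.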
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001732 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1733 if(!mCurrentFrame.isFBComposed[i]) {
1734 hwc_layer_1_t* layer = &list->hwLayers[i];
1735 hwc_rect_t dst = layer->displayFrame;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001736 if(mixer == Overlay::MIXER_LEFT && dst.left < lSplit) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001737 pipesNeeded++;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001738 } else if(mixer == Overlay::MIXER_RIGHT && dst.right > lSplit) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001739 pipesNeeded++;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001740 }
1741 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001742 }
1743 return pipesNeeded;
1744}
1745
Saurabh Shah88e4d272013-09-03 13:31:29 -07001746bool MDPCompSplit::arePipesAvailable(hwc_context_t *ctx,
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001747 hwc_display_contents_1_t* list) {
1748 overlay::Overlay& ov = *ctx->mOverlay;
Saurabh Shah082468e2013-09-12 10:05:32 -07001749 int totalPipesNeeded = 0;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001750
1751 for(int i = 0; i < Overlay::MIXER_MAX; i++) {
1752 int numPipesNeeded = pipesNeeded(ctx, list, i);
1753 int availPipes = ov.availablePipes(mDpy, i);
1754
1755 //Reserve pipe(s)for FB
1756        //Reserve pipe(s) for FB
Saurabh Shah082468e2013-09-12 10:05:32 -07001757 numPipesNeeded += 1;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001758
Saurabh Shah082468e2013-09-12 10:05:32 -07001759 totalPipesNeeded += numPipesNeeded;
1760
1761 //Per mixer check.
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001762 if(numPipesNeeded > availPipes) {
1763 ALOGD_IF(isDebug(), "%s: Insufficient pipes for "
1764 "dpy %d mixer %d needed %d, avail %d",
1765 __FUNCTION__, mDpy, i, numPipesNeeded, availPipes);
1766 return false;
1767 }
1768 }
Saurabh Shah082468e2013-09-12 10:05:32 -07001769
1770 //Per display check, since unused pipes can get counted twice.
1771 int totalPipesAvailable = ov.availablePipes(mDpy);
1772 if(totalPipesNeeded > totalPipesAvailable) {
1773 ALOGD_IF(isDebug(), "%s: Insufficient pipes for "
1774 "dpy %d needed %d, avail %d",
1775 __FUNCTION__, mDpy, totalPipesNeeded, totalPipesAvailable);
1776 return false;
1777 }
1778
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001779 if(not areVGPipesAvailable(ctx, list)) {
1780 return false;
1781 }
1782
1783 return true;
1784}
1785
1786bool MDPCompSplit::areVGPipesAvailable(hwc_context_t *ctx,
1787 hwc_display_contents_1_t* list) {
1788 overlay::Overlay& ov = *ctx->mOverlay;
1789 int pipesNeeded = 0;
1790 const int lSplit = getLeftSplit(ctx, mDpy);
1791 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1792 if(!mCurrentFrame.isFBComposed[i]) {
1793 hwc_layer_1_t* layer = &list->hwLayers[i];
1794 hwc_rect_t dst = layer->displayFrame;
1795 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301796 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1797 if((dst.left > lSplit)||(dst.right < lSplit)){
1798 pipesNeeded = pipesNeeded + 2;
1799 continue;
1800 }
1801 }
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001802 if(isYuvBuffer(hnd)) {
1803 if(dst.left < lSplit) {
1804 pipesNeeded++;
1805 }
1806 if(dst.right > lSplit) {
1807 pipesNeeded++;
1808 }
1809 }
1810 }
1811 }
1812
1813 int availableVGPipes = ov.availablePipes(mDpy, ovutils::OV_MDP_PIPE_VG);
1814 if(pipesNeeded > availableVGPipes) {
1815        ALOGD_IF(isDebug(), "%s: Insufficient VG pipes for video layers "
1816 "dpy %d needed %d, avail %d",
1817 __FUNCTION__, mDpy, pipesNeeded, availableVGPipes);
1818 return false;
1819 }
1820
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001821 return true;
1822}
1823
Saurabh Shah88e4d272013-09-03 13:31:29 -07001824bool MDPCompSplit::acquireMDPPipes(hwc_context_t *ctx, hwc_layer_1_t* layer,
1825 MdpPipeInfoSplit& pipe_info,
Saurabh Shah67a38c32013-06-10 16:23:15 -07001826 ePipeType type) {
1827 const int xres = ctx->dpyAttr[mDpy].xres;
Saurabh Shah07a8ca82013-08-06 18:45:42 -07001828 const int lSplit = getLeftSplit(ctx, mDpy);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001829
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001830 hwc_rect_t dst = layer->displayFrame;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001831 pipe_info.lIndex = ovutils::OV_INVALID;
1832 pipe_info.rIndex = ovutils::OV_INVALID;
1833
1834 if (dst.left < lSplit) {
1835 pipe_info.lIndex = getMdpPipe(ctx, type, Overlay::MIXER_LEFT);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001836 if(pipe_info.lIndex == ovutils::OV_INVALID)
1837 return false;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001838 }
1839
1840 if(dst.right > lSplit) {
1841 pipe_info.rIndex = getMdpPipe(ctx, type, Overlay::MIXER_RIGHT);
1842 if(pipe_info.rIndex == ovutils::OV_INVALID)
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001843 return false;
1844 }
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001845
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001846 return true;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001847}
1848
Saurabh Shah88e4d272013-09-03 13:31:29 -07001849bool MDPCompSplit::allocLayerPipes(hwc_context_t *ctx,
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001850 hwc_display_contents_1_t* list) {
1851 for(int index = 0 ; index < mCurrentFrame.layerCount; index++) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001852
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001853 if(mCurrentFrame.isFBComposed[index]) continue;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001854
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001855 hwc_layer_1_t* layer = &list->hwLayers[index];
1856 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301857 hwc_rect_t dst = layer->displayFrame;
1858 const int lSplit = getLeftSplit(ctx, mDpy);
1859 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1860 if((dst.left > lSplit)||(dst.right < lSplit)){
1861 if(allocSplitVGPipesfor4k2k(ctx, list, index)){
1862 continue;
1863 }
1864 }
1865 }
Saurabh Shah0d65dbe2013-06-06 18:33:16 -07001866 int mdpIndex = mCurrentFrame.layerToMDP[index];
1867 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
Saurabh Shah88e4d272013-09-03 13:31:29 -07001868 info.pipeInfo = new MdpPipeInfoSplit;
Saurabh Shah9e3adb22013-03-26 11:16:27 -07001869 info.rot = NULL;
Saurabh Shah88e4d272013-09-03 13:31:29 -07001870 MdpPipeInfoSplit& pipe_info = *(MdpPipeInfoSplit*)info.pipeInfo;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001871 ePipeType type = MDPCOMP_OV_ANY;
1872
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001873 if(isYuvBuffer(hnd)) {
1874 type = MDPCOMP_OV_VG;
Sushil Chauhan15a2ea62013-09-04 18:28:36 -07001875 } else if(!qhwc::needsScalingWithSplit(ctx, layer, mDpy)
Saurabh Shah85234ec2013-04-12 17:09:00 -07001876 && Overlay::getDMAMode() != Overlay::DMA_BLOCK_MODE
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001877 && ctx->mMDP.version >= qdutils::MDSS_V5) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001878 type = MDPCOMP_OV_DMA;
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001879 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001880
1881 if(!acquireMDPPipes(ctx, layer, pipe_info, type)) {
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001882 ALOGD_IF(isDebug(), "%s: Unable to get pipe for type = %d",
1883 __FUNCTION__, (int) type);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001884 return false;
1885 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001886 }
1887 return true;
1888}
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001889
radhakrishnac9a67412013-09-25 17:40:42 +05301890int MDPCompSplit::configure4k2kYuv(hwc_context_t *ctx, hwc_layer_1_t *layer,
1891 PipeLayerPair& PipeLayerPair) {
1892 const int lSplit = getLeftSplit(ctx, mDpy);
1893 hwc_rect_t dst = layer->displayFrame;
1894 if((dst.left > lSplit)||(dst.right < lSplit)){
1895 MdpYUVPipeInfo& mdp_info =
1896 *(static_cast<MdpYUVPipeInfo*>(PipeLayerPair.pipeInfo));
1897 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1898 eIsFg isFg = IS_FG_OFF;
1899 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1900 eDest lDest = mdp_info.lIndex;
1901 eDest rDest = mdp_info.rIndex;
1902
1903 return configureSourceSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg,
1904 lDest, rDest, &PipeLayerPair.rot);
1905 }
1906 else{
1907 return configure(ctx, layer, PipeLayerPair);
1908 }
1909}
1910
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001911/*
1912 * Configures pipe(s) for MDP composition
1913 */
Saurabh Shah88e4d272013-09-03 13:31:29 -07001914int MDPCompSplit::configure(hwc_context_t *ctx, hwc_layer_1_t *layer,
Saurabh Shah67a38c32013-06-10 16:23:15 -07001915 PipeLayerPair& PipeLayerPair) {
Saurabh Shah88e4d272013-09-03 13:31:29 -07001916 MdpPipeInfoSplit& mdp_info =
1917 *(static_cast<MdpPipeInfoSplit*>(PipeLayerPair.pipeInfo));
Saurabh Shahacf10202013-02-26 10:15:15 -08001918 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1919 eIsFg isFg = IS_FG_OFF;
1920 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1921 eDest lDest = mdp_info.lIndex;
1922 eDest rDest = mdp_info.rIndex;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001923
1924    ALOGD_IF(isDebug(),"%s: configuring: layer: %p z_order: %d dest_pipeL: %d "
1925 "dest_pipeR: %d",__FUNCTION__, layer, zOrder, lDest, rDest);
1926
Saurabh Shah88e4d272013-09-03 13:31:29 -07001927 return configureSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg, lDest,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001928 rDest, &PipeLayerPair.rot);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001929}
1930
Saurabh Shah88e4d272013-09-03 13:31:29 -07001931bool MDPCompSplit::draw(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001932
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001933 if(!isEnabled()) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001934 ALOGD_IF(isDebug(),"%s: MDP Comp not configured", __FUNCTION__);
1935 return true;
1936 }
1937
1938 if(!ctx || !list) {
1939        ALOGE("%s: invalid context or list",__FUNCTION__);
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001940 return false;
1941 }
1942
Prabhanjan Kandula08222fc2013-07-10 17:20:59 +05301943 if(ctx->listStats[mDpy].numAppLayers > MAX_NUM_APP_LAYERS) {
1944 ALOGD_IF(isDebug(),"%s: Exceeding max layer count", __FUNCTION__);
1945 return true;
1946 }
1947
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001948 /* reset Invalidator */
Saurabh Shah2d998a92013-05-14 17:55:58 -07001949 if(idleInvalidator && !sIdleFallBack && mCurrentFrame.mdpCount)
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001950 idleInvalidator->markForSleep();
1951
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001952 overlay::Overlay& ov = *ctx->mOverlay;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001953 LayerProp *layerProp = ctx->layerProp[mDpy];
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001954
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001955 int numHwLayers = ctx->listStats[mDpy].numAppLayers;
1956 for(int i = 0; i < numHwLayers && mCurrentFrame.mdpCount; i++ )
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001957 {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001958 if(mCurrentFrame.isFBComposed[i]) continue;
1959
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001960 hwc_layer_1_t *layer = &list->hwLayers[i];
Saurabh Shahacf10202013-02-26 10:15:15 -08001961 private_handle_t *hnd = (private_handle_t *)layer->handle;
1962 if(!hnd) {
1963 ALOGE("%s handle null", __FUNCTION__);
1964 return false;
1965 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001966
1967 if(!(layerProp[i].mFlags & HWC_MDPCOMP)) {
1968 continue;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001969 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001970
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001971 int mdpIndex = mCurrentFrame.layerToMDP[i];
1972
radhakrishnac9a67412013-09-25 17:40:42 +05301973 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit)
1974 {
1975 MdpYUVPipeInfo& pipe_info =
1976 *(MdpYUVPipeInfo*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
1977 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
1978 ovutils::eDest indexL = pipe_info.lIndex;
1979 ovutils::eDest indexR = pipe_info.rIndex;
1980 int fd = hnd->fd;
1981 uint32_t offset = hnd->offset;
1982 if(rot) {
1983 rot->queueBuffer(fd, offset);
1984 fd = rot->getDstMemId();
1985 offset = rot->getDstOffset();
1986 }
1987 if(indexL != ovutils::OV_INVALID) {
1988 ovutils::eDest destL = (ovutils::eDest)indexL;
1989 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1990 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
1991 if (!ov.queueBuffer(fd, offset, destL)) {
1992 ALOGE("%s: queueBuffer failed for display:%d",
1993 __FUNCTION__, mDpy);
1994 return false;
1995 }
1996 }
Saurabh Shahacf10202013-02-26 10:15:15 -08001997
radhakrishnac9a67412013-09-25 17:40:42 +05301998 if(indexR != ovutils::OV_INVALID) {
1999 ovutils::eDest destR = (ovutils::eDest)indexR;
2000 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
2001 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
2002 if (!ov.queueBuffer(fd, offset, destR)) {
2003 ALOGE("%s: queueBuffer failed for display:%d",
2004 __FUNCTION__, mDpy);
2005 return false;
2006 }
Saurabh Shaha9da08f2013-07-03 13:27:53 -07002007 }
2008 }
radhakrishnac9a67412013-09-25 17:40:42 +05302009 else{
2010 MdpPipeInfoSplit& pipe_info =
2011 *(MdpPipeInfoSplit*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
2012 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
Saurabh Shaha9da08f2013-07-03 13:27:53 -07002013
radhakrishnac9a67412013-09-25 17:40:42 +05302014 ovutils::eDest indexL = pipe_info.lIndex;
2015 ovutils::eDest indexR = pipe_info.rIndex;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002016
radhakrishnac9a67412013-09-25 17:40:42 +05302017 int fd = hnd->fd;
2018 int offset = hnd->offset;
2019
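            //Assertive Display: when its mode is on, the AD block post-processes
            //the content and its output buffer is queued instead of the original.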
2020 if(ctx->mAD->isModeOn()) {
2021 if(ctx->mAD->draw(ctx, fd, offset)) {
2022 fd = ctx->mAD->getDstFd(ctx);
2023 offset = ctx->mAD->getDstOffset(ctx);
2024 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002025 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002026
radhakrishnac9a67412013-09-25 17:40:42 +05302027 if(rot) {
2028 rot->queueBuffer(fd, offset);
2029 fd = rot->getDstMemId();
2030 offset = rot->getDstOffset();
2031 }
2032
2033 //************* play left mixer **********
2034 if(indexL != ovutils::OV_INVALID) {
2035 ovutils::eDest destL = (ovutils::eDest)indexL;
2036 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
2037 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
2038 if (!ov.queueBuffer(fd, offset, destL)) {
2039 ALOGE("%s: queueBuffer failed for left mixer",
2040 __FUNCTION__);
2041 return false;
2042 }
2043 }
2044
2045 //************* play right mixer **********
2046 if(indexR != ovutils::OV_INVALID) {
2047 ovutils::eDest destR = (ovutils::eDest)indexR;
2048 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
2049 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
2050 if (!ov.queueBuffer(fd, offset, destR)) {
2051 ALOGE("%s: queueBuffer failed for right mixer",
2052 __FUNCTION__);
2053 return false;
2054 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002055 }
2056 }
Saurabh Shahacf10202013-02-26 10:15:15 -08002057
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002058 layerProp[i].mFlags &= ~HWC_MDPCOMP;
2059 }
Saurabh Shahacf10202013-02-26 10:15:15 -08002060
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002061 return true;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002062}
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002063}; //namespace
2064