/*
 * Copyright (C) 2012-2014, The Linux Foundation. All rights reserved.
 * Not a Contribution, Apache license notifications and license are retained
 * for attribution purposes only.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <math.h>
#include "hwc_mdpcomp.h"
#include <sys/ioctl.h>
#include "external.h"
#include "virtual.h"
#include "qdMetaData.h"
#include "mdp_version.h"
#include "hwc_fbupdate.h"
#include "hwc_ad.h"
#include <overlayRotator.h>

using namespace overlay;
using namespace qdutils;
using namespace overlay::utils;
namespace ovutils = overlay::utils;

namespace qhwc {

//==============MDPComp========================================================

IdleInvalidator *MDPComp::idleInvalidator = NULL;
bool MDPComp::sIdleFallBack = false;
bool MDPComp::sDebugLogs = false;
bool MDPComp::sEnabled = false;
bool MDPComp::sEnableMixedMode = true;
bool MDPComp::sEnablePartialFrameUpdate = false;
int MDPComp::sMaxPipesPerMixer = MAX_PIPES_PER_MIXER;
double MDPComp::sMaxBw = 0.0;
double MDPComp::sBwClaimed = 0.0;
bool MDPComp::sEnable4k2kYUVSplit = false;

MDPComp* MDPComp::getObject(hwc_context_t *ctx, const int& dpy) {
    if(isDisplaySplit(ctx, dpy)) {
        return new MDPCompSplit(dpy);
    }
    return new MDPCompNonSplit(dpy);
}

MDPComp::MDPComp(int dpy):mDpy(dpy){};

void MDPComp::dump(android::String8& buf)
{
    if(mCurrentFrame.layerCount > MAX_NUM_APP_LAYERS)
        return;

    dumpsys_log(buf,"HWC Map for Dpy: %s \n",
                (mDpy == 0) ? "\"PRIMARY\"" :
                (mDpy == 1) ? "\"EXTERNAL\"" : "\"VIRTUAL\"");
    dumpsys_log(buf,"CURR_FRAME: layerCount:%2d mdpCount:%2d "
                "fbCount:%2d \n", mCurrentFrame.layerCount,
                mCurrentFrame.mdpCount, mCurrentFrame.fbCount);
    dumpsys_log(buf,"needsFBRedraw:%3s pipesUsed:%2d MaxPipesPerMixer: %d \n",
                (mCurrentFrame.needsRedraw? "YES" : "NO"),
                mCurrentFrame.mdpCount, sMaxPipesPerMixer);
    dumpsys_log(buf," --------------------------------------------- \n");
    dumpsys_log(buf," listIdx | cached? | mdpIndex | comptype | Z \n");
    dumpsys_log(buf," --------------------------------------------- \n");
    for(int index = 0; index < mCurrentFrame.layerCount; index++ )
        dumpsys_log(buf," %7d | %7s | %8d | %9s | %2d \n",
                    index,
                    (mCurrentFrame.isFBComposed[index] ? "YES" : "NO"),
                    mCurrentFrame.layerToMDP[index],
                    (mCurrentFrame.isFBComposed[index] ?
                    (mCurrentFrame.drop[index] ? "DROP" :
                    (mCurrentFrame.needsRedraw ? "GLES" : "CACHE")) : "MDP"),
                    (mCurrentFrame.isFBComposed[index] ? mCurrentFrame.fbZ :
                    mCurrentFrame.mdpToLayer[mCurrentFrame.layerToMDP[index]].pipeInfo->zOrder));
    dumpsys_log(buf,"\n");
}

bool MDPComp::init(hwc_context_t *ctx) {

    if(!ctx) {
        ALOGE("%s: Invalid hwc context!!",__FUNCTION__);
        return false;
    }

    char property[PROPERTY_VALUE_MAX];

    sEnabled = false;
    if((property_get("persist.hwc.mdpcomp.enable", property, NULL) > 0) &&
       (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
        (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
        sEnabled = true;
    }

    sEnableMixedMode = true;
    if((property_get("debug.mdpcomp.mixedmode.disable", property, NULL) > 0) &&
       (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
        (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
        sEnableMixedMode = false;
    }

    if(property_get("debug.mdpcomp.logs", property, NULL) > 0) {
        if(atoi(property) != 0)
            sDebugLogs = true;
    }

    if(property_get("persist.hwc.partialupdate.enable", property, NULL) > 0) {
        if((atoi(property) != 0) && ctx->mMDP.panel == MIPI_CMD_PANEL &&
           qdutils::MDPVersion::getInstance().is8x74v2())
            sEnablePartialFrameUpdate = true;
    }
    ALOGE_IF(isDebug(), "%s: Partial Update applicable?: %d",__FUNCTION__,
             sEnablePartialFrameUpdate);

    sMaxPipesPerMixer = MAX_PIPES_PER_MIXER;
    if(property_get("debug.mdpcomp.maxpermixer", property, "-1") > 0) {
        int val = atoi(property);
        if(val >= 0)
            sMaxPipesPerMixer = min(val, MAX_PIPES_PER_MIXER);
    }

    if(ctx->mMDP.panel != MIPI_CMD_PANEL) {
        // Idle invalidation is not necessary on command mode panels
        long idle_timeout = DEFAULT_IDLE_TIME;
        if(property_get("debug.mdpcomp.idletime", property, NULL) > 0) {
            if(atoi(property) != 0)
                idle_timeout = atoi(property);
        }

        //create Idle Invalidator only when not disabled through property
        if(idle_timeout != -1)
            idleInvalidator = IdleInvalidator::getInstance();

        if(idleInvalidator == NULL) {
            ALOGE("%s: failed to instantiate idleInvalidator object",
                  __FUNCTION__);
        } else {
            idleInvalidator->init(timeout_handler, ctx, idle_timeout);
        }
    }

    if((property_get("debug.mdpcomp.4k2kSplit", property, "0") > 0) &&
       (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
        (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
        sEnable4k2kYUVSplit = true;
    }
    return true;
}

void MDPComp::reset(hwc_context_t *ctx) {
    const int numLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numLayers);
    ctx->mOverlay->clear(mDpy);
    ctx->mLayerRotMap[mDpy]->clear();
}

void MDPComp::timeout_handler(void *udata) {
    struct hwc_context_t* ctx = (struct hwc_context_t*)(udata);

    if(!ctx) {
        ALOGE("%s: received empty data in timer callback", __FUNCTION__);
        return;
    }

    if(!ctx->proc) {
        ALOGE("%s: HWC proc not registered", __FUNCTION__);
        return;
    }
    sIdleFallBack = true;
    /* Trigger SF to redraw the current frame */
    ctx->proc->invalidate(ctx->proc);
}

void MDPComp::setMDPCompLayerFlags(hwc_context_t *ctx,
                                   hwc_display_contents_1_t* list) {
    LayerProp *layerProp = ctx->layerProp[mDpy];

    for(int index = 0; index < ctx->listStats[mDpy].numAppLayers; index++) {
        hwc_layer_1_t* layer = &(list->hwLayers[index]);
        if(!mCurrentFrame.isFBComposed[index]) {
            layerProp[index].mFlags |= HWC_MDPCOMP;
            layer->compositionType = HWC_OVERLAY;
            layer->hints |= HWC_HINT_CLEAR_FB;
        } else {
            /* Drop the layer when its already present in FB OR when it lies
             * outside frame's ROI */
            if(!mCurrentFrame.needsRedraw || mCurrentFrame.drop[index]) {
                layer->compositionType = HWC_OVERLAY;
            }
        }
    }
}

void MDPComp::setRedraw(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    mCurrentFrame.needsRedraw = false;
    if(!mCachedFrame.isSameFrame(mCurrentFrame, list) ||
            (list->flags & HWC_GEOMETRY_CHANGED) ||
            isSkipPresent(ctx, mDpy)) {
        mCurrentFrame.needsRedraw = true;
    }
}

MDPComp::FrameInfo::FrameInfo() {
    reset(0);
}

void MDPComp::FrameInfo::reset(const int& numLayers) {
    for(int i = 0 ; i < MAX_PIPES_PER_MIXER && numLayers; i++ ) {
        if(mdpToLayer[i].pipeInfo) {
            delete mdpToLayer[i].pipeInfo;
            mdpToLayer[i].pipeInfo = NULL;
            //We dont own the rotator
            mdpToLayer[i].rot = NULL;
        }
    }

    memset(&mdpToLayer, 0, sizeof(mdpToLayer));
    memset(&layerToMDP, -1, sizeof(layerToMDP));
    memset(&isFBComposed, 1, sizeof(isFBComposed));

    layerCount = numLayers;
    fbCount = numLayers;
    mdpCount = 0;
    needsRedraw = true;
    fbZ = -1;
}

void MDPComp::FrameInfo::map() {
    // populate layer and MDP maps
    int mdpIdx = 0;
    for(int idx = 0; idx < layerCount; idx++) {
        if(!isFBComposed[idx]) {
            mdpToLayer[mdpIdx].listIndex = idx;
            layerToMDP[idx] = mdpIdx++;
        }
    }
}

MDPComp::LayerCache::LayerCache() {
    reset();
}

void MDPComp::LayerCache::reset() {
    memset(&hnd, 0, sizeof(hnd));
    memset(&isFBComposed, true, sizeof(isFBComposed));
    memset(&drop, false, sizeof(drop));
    layerCount = 0;
}

void MDPComp::LayerCache::cacheAll(hwc_display_contents_1_t* list) {
    const int numAppLayers = list->numHwLayers - 1;
    for(int i = 0; i < numAppLayers; i++) {
        hnd[i] = list->hwLayers[i].handle;
    }
}

void MDPComp::LayerCache::updateCounts(const FrameInfo& curFrame) {
    layerCount = curFrame.layerCount;
    memcpy(&isFBComposed, &curFrame.isFBComposed, sizeof(isFBComposed));
    memcpy(&drop, &curFrame.drop, sizeof(drop));
}

bool MDPComp::LayerCache::isSameFrame(const FrameInfo& curFrame,
                                      hwc_display_contents_1_t* list) {
    if(layerCount != curFrame.layerCount)
        return false;
    for(int i = 0; i < curFrame.layerCount; i++) {
        if((curFrame.isFBComposed[i] != isFBComposed[i]) ||
           (curFrame.drop[i] != drop[i])) {
            return false;
        }
        if(curFrame.isFBComposed[i] &&
           (hnd[i] != list->hwLayers[i].handle)){
            return false;
        }
    }
    return true;
}

bool MDPComp::isSupportedForMDPComp(hwc_context_t *ctx, hwc_layer_1_t* layer) {
    private_handle_t *hnd = (private_handle_t *)layer->handle;
    if((not isYuvBuffer(hnd) and has90Transform(layer)) or
        (not isValidDimension(ctx,layer))
        //More conditions here, SKIP, sRGB+Blend etc
        ) {
        return false;
    }
    return true;
}

bool MDPComp::isValidDimension(hwc_context_t *ctx, hwc_layer_1_t *layer) {
    private_handle_t *hnd = (private_handle_t *)layer->handle;

    if(!hnd) {
        if (layer->flags & HWC_COLOR_FILL) {
            // Color layer
            return true;
        }
        ALOGE("%s: layer handle is NULL", __FUNCTION__);
        return false;
    }

    //XXX: Investigate doing this with pixel phase on MDSS
    if(!isSecureBuffer(hnd) && isNonIntegralSourceCrop(layer->sourceCropf))
        return false;

    hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
    hwc_rect_t dst = layer->displayFrame;
    int crop_w = crop.right - crop.left;
    int crop_h = crop.bottom - crop.top;
    int dst_w = dst.right - dst.left;
    int dst_h = dst.bottom - dst.top;
    float w_scale = ((float)crop_w / (float)dst_w);
    float h_scale = ((float)crop_h / (float)dst_h);

    /* Workaround for MDP HW limitation in DSI command mode panels where
     * FPS will not go beyond 30 if buffers on RGB pipes are of width or height
     * less than 5 pixels.
     * There is also a HW limitation in MDP, minimum block size is 2x2.
     * Fallback to GPU if height is less than 2.
     */
    if((crop_w < 5)||(crop_h < 5))
        return false;

    if((w_scale > 1.0f) || (h_scale > 1.0f)) {
        const uint32_t downscale =
            qdutils::MDPVersion::getInstance().getMaxMDPDownscale();
        const float w_dscale = w_scale;
        const float h_dscale = h_scale;

        if(ctx->mMDP.version >= qdutils::MDSS_V5) {
            /* Workaround for downscales larger than 4x.
             * Will be removed once decimator block is enabled for MDSS
             */
            if(!qdutils::MDPVersion::getInstance().supportsDecimation()) {
                if(crop_w > MAX_DISPLAY_DIM || w_dscale > downscale ||
                        h_dscale > downscale)
                    return false;
            } else {
                if(w_dscale > 64 || h_dscale > 64)
                    return false;
            }
        } else { //A-family
            if(w_dscale > downscale || h_dscale > downscale)
                return false;
        }
    }

    if((w_scale < 1.0f) || (h_scale < 1.0f)) {
        const uint32_t upscale =
            qdutils::MDPVersion::getInstance().getMaxMDPUpscale();
        const float w_uscale = 1.0f / w_scale;
        const float h_uscale = 1.0f / h_scale;

        if(w_uscale > upscale || h_uscale > upscale)
            return false;
    }

    return true;
}

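/* Pipe allocation falls back across pools: the switch cases below
 * intentionally fall through, so a DMA request that fails retries on the RGB
 * pool, and a DMA/ANY request that still fails falls through to the VG pool,
 * while an RGB-only request breaks out instead of taking a VG pipe. */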
ovutils::eDest MDPComp::getMdpPipe(hwc_context_t *ctx, ePipeType type,
        int mixer) {
    overlay::Overlay& ov = *ctx->mOverlay;
    ovutils::eDest mdp_pipe = ovutils::OV_INVALID;

    switch(type) {
    case MDPCOMP_OV_DMA:
        mdp_pipe = ov.nextPipe(ovutils::OV_MDP_PIPE_DMA, mDpy, mixer);
        if(mdp_pipe != ovutils::OV_INVALID) {
            return mdp_pipe;
        }
    case MDPCOMP_OV_ANY:
    case MDPCOMP_OV_RGB:
        mdp_pipe = ov.nextPipe(ovutils::OV_MDP_PIPE_RGB, mDpy, mixer);
        if(mdp_pipe != ovutils::OV_INVALID) {
            return mdp_pipe;
        }

        if(type == MDPCOMP_OV_RGB) {
            //Requested only for RGB pipe
            break;
        }
    case MDPCOMP_OV_VG:
        return ov.nextPipe(ovutils::OV_MDP_PIPE_VG, mDpy, mixer);
    default:
        ALOGE("%s: Invalid pipe type",__FUNCTION__);
        return ovutils::OV_INVALID;
    };
    return ovutils::OV_INVALID;
}

bool MDPComp::isFrameDoable(hwc_context_t *ctx) {
    bool ret = true;

    if(!isEnabled()) {
        ALOGD_IF(isDebug(),"%s: MDP Comp. not enabled.", __FUNCTION__);
        ret = false;
    } else if(qdutils::MDPVersion::getInstance().is8x26() &&
            ctx->mVideoTransFlag &&
            isSecondaryConnected(ctx)) {
        //1 Padding round to shift pipes across mixers
        ALOGD_IF(isDebug(),"%s: MDP Comp. video transition padding round",
                __FUNCTION__);
        ret = false;
    } else if(isSecondaryConfiguring(ctx)) {
        ALOGD_IF( isDebug(),"%s: External Display connection is pending",
                  __FUNCTION__);
        ret = false;
    } else if(ctx->isPaddingRound) {
        ALOGD_IF(isDebug(), "%s: padding round invoked for dpy %d",
                 __FUNCTION__,mDpy);
        ret = false;
    }
    return ret;
}

/*
 * 1) Identify layers that are not visible in the updating ROI and drop them
 * from composition.
 * 2) If there is a scaling layer which needs cropping against the generated
 * ROI, reset the ROI to full resolution.
 */
bool MDPComp::validateAndApplyROI(hwc_context_t *ctx,
                               hwc_display_contents_1_t* list, hwc_rect_t roi) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    if(!isValidRect(roi))
        return false;

    hwc_rect_t visibleRect = roi;

    for(int i = numAppLayers - 1; i >= 0; i--){

        if(!isValidRect(visibleRect)) {
            mCurrentFrame.drop[i] = true;
            mCurrentFrame.dropCount++;
            continue;
        }

        const hwc_layer_1_t* layer = &list->hwLayers[i];

        hwc_rect_t dstRect = layer->displayFrame;
        hwc_rect_t srcRect = integerizeSourceCrop(layer->sourceCropf);

        hwc_rect_t res = getIntersection(visibleRect, dstRect);

        int res_w = res.right - res.left;
        int res_h = res.bottom - res.top;
        int dst_w = dstRect.right - dstRect.left;
        int dst_h = dstRect.bottom - dstRect.top;

        if(!isValidRect(res)) {
            mCurrentFrame.drop[i] = true;
            mCurrentFrame.dropCount++;
        }else {
            /* Reset frame ROI when any layer which needs scaling also needs ROI
             * cropping */
            if((res_w != dst_w || res_h != dst_h) && needsScaling (layer)) {
                ALOGI("%s: Resetting ROI due to scaling", __FUNCTION__);
                memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
                mCurrentFrame.dropCount = 0;
                return false;
            }

            /* deduct any opaque region from visibleRect */
            if (layer->blending == HWC_BLENDING_NONE)
                visibleRect = deductRect(visibleRect, res);
        }
    }
    return true;
}

void MDPComp::generateROI(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    if(!sEnablePartialFrameUpdate) {
        return;
    }

    if(mDpy || isDisplaySplit(ctx, mDpy)){
        ALOGE_IF(isDebug(), "%s: ROI not supported for"
                 "the (1) external / virtual display's (2) dual DSI displays",
                 __FUNCTION__);
        return;
    }

    if(isSkipPresent(ctx, mDpy))
        return;

    if(list->flags & HWC_GEOMETRY_CHANGED)
        return;

    struct hwc_rect roi = (struct hwc_rect){0, 0, 0, 0};
    for(int index = 0; index < numAppLayers; index++ ) {
        if ((mCachedFrame.hnd[index] != list->hwLayers[index].handle) ||
            isYuvBuffer((private_handle_t *)list->hwLayers[index].handle)) {
            hwc_rect_t dstRect = list->hwLayers[index].displayFrame;
            hwc_rect_t srcRect = integerizeSourceCrop(
                                        list->hwLayers[index].sourceCropf);

            /* Intersect against display boundaries */
            roi = getUnion(roi, dstRect);
        }
    }

    if(!validateAndApplyROI(ctx, list, roi)){
        roi = (struct hwc_rect) {0, 0,
                (int)ctx->dpyAttr[mDpy].xres, (int)ctx->dpyAttr[mDpy].yres};
    }

    ctx->listStats[mDpy].roi.x = roi.left;
    ctx->listStats[mDpy].roi.y = roi.top;
    ctx->listStats[mDpy].roi.w = roi.right - roi.left;
    ctx->listStats[mDpy].roi.h = roi.bottom - roi.top;

    ALOGD_IF(isDebug(),"%s: generated ROI: [%d, %d, %d, %d]", __FUNCTION__,
             roi.left, roi.top, roi.right, roi.bottom);
}

/* Checks for conditions where all the layers marked for MDP comp cannot be
 * bypassed. On such conditions we try to bypass at least the YUV layers */
bool MDPComp::tryFullFrame(hwc_context_t *ctx,
                           hwc_display_contents_1_t* list){

    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    int priDispW = ctx->dpyAttr[HWC_DISPLAY_PRIMARY].xres;

    if(sIdleFallBack && !ctx->listStats[mDpy].secureUI) {
        ALOGD_IF(isDebug(), "%s: Idle fallback dpy %d",__FUNCTION__, mDpy);
        return false;
    }

    if(isSkipPresent(ctx, mDpy)) {
        ALOGD_IF(isDebug(),"%s: SKIP present: %d",
                __FUNCTION__,
                isSkipPresent(ctx, mDpy));
        return false;
    }

    if(mDpy > HWC_DISPLAY_PRIMARY && (priDispW > MAX_DISPLAY_DIM) &&
            (ctx->dpyAttr[mDpy].xres < MAX_DISPLAY_DIM)) {
        // Disable MDP comp on the secondary when the primary is a high-res
        // panel and the secondary is a normal 1080p panel: in that usecase
        // MDP comp on the secondary uses decimation for the downscale, and
        // there would be a quality mismatch whenever it falls back to GPU comp
        ALOGD_IF(isDebug(), "%s: Disable MDP Composition for Secondary Disp",
                __FUNCTION__);
        return false;
    }

    // check for action safe flag and downscale mode which requires scaling.
    if(ctx->dpyAttr[mDpy].mActionSafePresent
            || ctx->dpyAttr[mDpy].mDownScaleMode) {
        ALOGD_IF(isDebug(), "%s: Scaling needed for this frame",__FUNCTION__);
        return false;
    }

    for(int i = 0; i < numAppLayers; ++i) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        private_handle_t *hnd = (private_handle_t *)layer->handle;

        if(isYuvBuffer(hnd) && has90Transform(layer)) {
            if(!canUseRotator(ctx, mDpy)) {
                ALOGD_IF(isDebug(), "%s: Can't use rotator for dpy %d",
                        __FUNCTION__, mDpy);
                return false;
            }
        }

        //For 8x26 with panel width>1k, if RGB layer needs HFLIP fail mdp comp
        // may not need it if Gfx pre-rotation can handle all flips & rotations
        if(qdutils::MDPVersion::getInstance().is8x26() &&
                (ctx->dpyAttr[mDpy].xres > 1024) &&
                (layer->transform & HWC_TRANSFORM_FLIP_H) &&
                (!isYuvBuffer(hnd)))
            return false;
    }

    if(ctx->mAD->isDoable()) {
        return false;
    }

    //If all above hard conditions are met we can do full or partial MDP comp.
    bool ret = false;
    if(fullMDPComp(ctx, list)) {
        ret = true;
    } else if(partialMDPComp(ctx, list)) {
        ret = true;
    }

    return ret;
}

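/* Tries to compose all non-dropped app layers through MDP, with no
 * framebuffer target. */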
bool MDPComp::fullMDPComp(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
    //Will benefit presentation / secondary-only layer.
    if((mDpy > HWC_DISPLAY_PRIMARY) &&
            (list->numHwLayers - 1) > MAX_SEC_LAYERS) {
        ALOGD_IF(isDebug(), "%s: Exceeds max secondary pipes",__FUNCTION__);
        return false;
    }

    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    for(int i = 0; i < numAppLayers; i++) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        if(not isSupportedForMDPComp(ctx, layer)) {
            ALOGD_IF(isDebug(), "%s: Unsupported layer in list",__FUNCTION__);
            return false;
        }

        //For 8x26, if there is only one layer which needs scale for secondary
        //while no scale for primary display, DMA pipe is occupied by primary.
        //If need to fall back to GLES composition, virtual display lacks DMA
        //pipe and error is reported.
        if(qdutils::MDPVersion::getInstance().is8x26() &&
                mDpy >= HWC_DISPLAY_EXTERNAL &&
                qhwc::needsScaling(layer))
            return false;
    }

    mCurrentFrame.fbCount = 0;
    memcpy(&mCurrentFrame.isFBComposed, &mCurrentFrame.drop,
           sizeof(mCurrentFrame.isFBComposed));
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount -
            mCurrentFrame.dropCount;

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

bool MDPComp::partialMDPComp(hwc_context_t *ctx, hwc_display_contents_1_t* list)
{
    if(!sEnableMixedMode) {
        //Mixed mode is disabled. No need to even try caching.
        return false;
    }

    bool ret = false;
    if(list->flags & HWC_GEOMETRY_CHANGED) { //Try load based first
        ret = loadBasedCompPreferGPU(ctx, list) or
                loadBasedCompPreferMDP(ctx, list) or
                cacheBasedComp(ctx, list);
    } else {
        ret = cacheBasedComp(ctx, list) or
                loadBasedCompPreferGPU(ctx, list) or
                loadBasedCompPreferMDP(ctx, list);
    }

    return ret;
}

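/* Cache based composition: layers whose buffers have not changed since the
 * previous frame stay in the cached framebuffer, and only the updating
 * layers are routed to MDP. */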
bool MDPComp::cacheBasedComp(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numAppLayers);
    updateLayerCache(ctx, list);

    //If an MDP marked layer is unsupported cannot do partial MDP Comp
    for(int i = 0; i < numAppLayers; i++) {
        if(!mCurrentFrame.isFBComposed[i]) {
            hwc_layer_1_t* layer = &list->hwLayers[i];
            if(not isSupportedForMDPComp(ctx, layer)) {
                ALOGD_IF(isDebug(), "%s: Unsupported layer in list",
                        __FUNCTION__);
                reset(ctx);
                return false;
            }
        }
    }

    updateYUV(ctx, list, false /*secure only*/);
    bool ret = markLayersForCaching(ctx, list); //sets up fbZ also
    if(!ret) {
        ALOGD_IF(isDebug(),"%s: batching failed, dpy %d",__FUNCTION__, mDpy);
        reset(ctx);
        return false;
    }

    int mdpCount = mCurrentFrame.mdpCount;

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    //Will benefit cases where a video has non-updating background.
    if((mDpy > HWC_DISPLAY_PRIMARY) and
            (mdpCount > MAX_SEC_LAYERS)) {
        ALOGD_IF(isDebug(), "%s: Exceeds max secondary pipes",__FUNCTION__);
        reset(ctx);
        return false;
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

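/* Load based composition that prefers GPU: picks the contiguous batch of
 * batchSize non-dropped layers with the lowest pixel count for the
 * framebuffer and marks everything outside it for MDP, so that MDP fetches
 * as few pixels as possible. */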
bool MDPComp::loadBasedCompPreferGPU(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    if(not isLoadBasedCompDoable(ctx)) {
        return false;
    }

    int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numAppLayers);

    int stagesForMDP = min(sMaxPipesPerMixer, ctx->mOverlay->availablePipes(
            mDpy, Overlay::MIXER_DEFAULT));
    //If MDP has X possible stages, it can take X layers.
    const int batchSize = (numAppLayers - mCurrentFrame.dropCount) -
            (stagesForMDP - 1); //1 for FB

    if(batchSize <= 0) {
        ALOGD_IF(isDebug(), "%s: Not attempting", __FUNCTION__);
        return false;
    }

    int minBatchStart = -1;
    int minBatchEnd = -1;
    size_t minBatchPixelCount = SIZE_MAX;

    /* Iterate through the layer list to find a contiguous batch of batchSize
     * non-dropped layers with the lowest pixel count */
    for(int i = 0; i <= (numAppLayers - batchSize); i++) {
        if(mCurrentFrame.drop[i])
            continue;

        int batchCount = batchSize;
        uint32_t batchPixelCount = 0;
        int j = i;
        for(; j < numAppLayers && batchCount; j++){
            if(!mCurrentFrame.drop[j]) {
                hwc_layer_1_t* layer = &list->hwLayers[j];
                hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
                hwc_rect_t dst = layer->displayFrame;

                /* If we have a valid ROI, count pixels only for the MDP fetched
                 * region of the buffer */
                if((ctx->listStats[mDpy].roi.w != ctx->dpyAttr[mDpy].xres) ||
                        (ctx->listStats[mDpy].roi.h != ctx->dpyAttr[mDpy].yres)) {
                    hwc_rect_t roi;
                    roi.left = ctx->listStats[mDpy].roi.x;
                    roi.top = ctx->listStats[mDpy].roi.y;
                    roi.right = roi.left + ctx->listStats[mDpy].roi.w;
                    roi.bottom = roi.top + ctx->listStats[mDpy].roi.h;

                    /* valid ROI means no scaling layer is composed. So check
                     * only intersection to find actual fetched pixels */
                    crop = getIntersection(roi, dst);
                }

                batchPixelCount += (crop.right - crop.left) *
                        (crop.bottom - crop.top);
                batchCount--;
            }
        }

        /* we dont want to program any batch of size lesser than batchSize */
        if(!batchCount && (batchPixelCount < minBatchPixelCount)) {
            minBatchPixelCount = batchPixelCount;
            minBatchStart = i;
            minBatchEnd = j-1;
        }
    }

    if(minBatchStart < 0) {
        ALOGD_IF(isDebug(), "%s: No batch found batchSize %d numAppLayers %d",
                __FUNCTION__, batchSize, numAppLayers);
        return false;
    }

    /* non-dropped layers falling outside the selected batch will be marked
     * for MDP */
    for(int i = 0; i < numAppLayers; i++) {
        if((i < minBatchStart || i > minBatchEnd) && !mCurrentFrame.drop[i] ) {
            hwc_layer_1_t* layer = &list->hwLayers[i];
            if(not isSupportedForMDPComp(ctx, layer)) {
                ALOGD_IF(isDebug(), "%s: MDP unsupported layer found at %d",
                        __FUNCTION__, i);
                reset(ctx);
                return false;
            }
            mCurrentFrame.isFBComposed[i] = false;
        }
    }

    mCurrentFrame.fbZ = minBatchStart;
    mCurrentFrame.fbCount = batchSize;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount -
            mCurrentFrame.dropCount;

    ALOGD_IF(isDebug(), "%s: fbZ %d batchSize %d fbStart: %d fbEnd: %d",
            __FUNCTION__, mCurrentFrame.fbZ, batchSize, minBatchStart,
            minBatchEnd);

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

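/* Load based composition that prefers MDP: estimates from the remaining
 * bandwidth how many full-screen layers MDP can fetch, keeps only the topmost
 * leftover non-dropped layers in the framebuffer batch and sends the rest to
 * MDP. */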
bool MDPComp::loadBasedCompPreferMDP(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    if(not isLoadBasedCompDoable(ctx)) {
        return false;
    }

    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numAppLayers);

    //Full screen is from ib perspective, not actual full screen
    const int bpp = 4;
    double panelRefRate =
            1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;

    double bwLeft = sMaxBw - sBwClaimed;

    const int fullScreenLayers = bwLeft * 1000000000 / (ctx->dpyAttr[mDpy].xres
            * ctx->dpyAttr[mDpy].yres * bpp * panelRefRate);

    const int fbBatchSize = (numAppLayers - mCurrentFrame.dropCount)
            - (fullScreenLayers - 1);

    //If batch size is not at least 2, we aren't really preferring MDP, since
    //only 1 layer going to GPU could actually translate into an entire FB
    //needed to be fetched by MDP, thus needing more b/w rather than less.
    if(fbBatchSize < 2 || fbBatchSize > numAppLayers) {
        ALOGD_IF(isDebug(), "%s: Not attempting", __FUNCTION__);
        return false;
    }

    //Find top fbBatchSize non-dropped layers to get your batch
    int fbStart = -1, fbEnd = -1, batchCount = fbBatchSize;
    for(int i = numAppLayers - 1; i >= 0; i--) {
        if(mCurrentFrame.drop[i])
            continue;

        if(fbEnd < 0)
            fbEnd = i;

        if(!(--batchCount)) {
            fbStart = i;
            break;
        }
    }

    //Bottom layers constitute MDP batch
    for(int i = 0; i < fbStart; i++) {
        if((i < fbStart || i > fbEnd) && !mCurrentFrame.drop[i] ) {
            hwc_layer_1_t* layer = &list->hwLayers[i];
            if(not isSupportedForMDPComp(ctx, layer)) {
                ALOGD_IF(isDebug(), "%s: MDP unsupported layer found at %d",
                        __FUNCTION__, i);
                reset(ctx);
                return false;
            }
            mCurrentFrame.isFBComposed[i] = false;
        }
    }

    mCurrentFrame.fbZ = fbStart;
    mCurrentFrame.fbCount = fbBatchSize;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount
            - mCurrentFrame.dropCount;

    ALOGD_IF(isDebug(), "%s: FB Z %d, app layers %d, non-dropped layers: %d, "
            "MDP Batch Size %d",__FUNCTION__, mCurrentFrame.fbZ, numAppLayers,
            numAppLayers - mCurrentFrame.dropCount, mCurrentFrame.mdpCount);

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

bool MDPComp::isLoadBasedCompDoable(hwc_context_t *ctx) {
    if(mDpy or isSecurePresent(ctx, mDpy) or
            isYuvPresent(ctx, mDpy)) {
        return false;
    }
    return true;
}

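/* Video only composition: place just the YUV layers on MDP (first allowing
 * all video, then secure-only video) and leave everything else on GLES. */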
bool MDPComp::tryVideoOnly(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    const bool secureOnly = true;
    return videoOnlyComp(ctx, list, not secureOnly) or
            videoOnlyComp(ctx, list, secureOnly);
}

bool MDPComp::videoOnlyComp(hwc_context_t *ctx,
        hwc_display_contents_1_t* list, bool secureOnly) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    mCurrentFrame.reset(numAppLayers);
    updateYUV(ctx, list, secureOnly);
    int mdpCount = mCurrentFrame.mdpCount;

    if(!isYuvPresent(ctx, mDpy) or (mdpCount == 0)) {
        reset(ctx);
        return false;
    }

    /* Bail out if we are processing only secured video layers
     * and we dont have any */
    if(!isSecurePresent(ctx, mDpy) && secureOnly){
        reset(ctx);
        return false;
    }

    if(mCurrentFrame.fbCount)
        mCurrentFrame.fbZ = mCurrentFrame.mdpCount;

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

/* Checks for conditions where YUV layers cannot be bypassed */
bool MDPComp::isYUVDoable(hwc_context_t* ctx, hwc_layer_1_t* layer) {
    if(isSkipLayer(layer)) {
        ALOGD_IF(isDebug(), "%s: Video marked SKIP dpy %d", __FUNCTION__, mDpy);
        return false;
    }

    if(layer->transform & HWC_TRANSFORM_ROT_90 && !canUseRotator(ctx,mDpy)) {
        ALOGD_IF(isDebug(), "%s: no free DMA pipe",__FUNCTION__);
        return false;
    }

    if(isSecuring(ctx, layer)) {
        ALOGD_IF(isDebug(), "%s: MDP securing is active", __FUNCTION__);
        return false;
    }

    if(!isValidDimension(ctx, layer)) {
        ALOGD_IF(isDebug(), "%s: Buffer is of invalid width",
            __FUNCTION__);
        return false;
    }

    if(layer->planeAlpha < 0xFF) {
        ALOGD_IF(isDebug(), "%s: Cannot handle YUV layer with plane alpha\
                 in video only mode",
                 __FUNCTION__);
        return false;
    }

    return true;
}

/* Starts at fromIndex and checks each cached layer in the batch for overlap
 * with any updating layer above it in z-order, till the end of the batch.
 * Returns false if it finds any such intersection */
bool MDPComp::canPushBatchToTop(const hwc_display_contents_1_t* list,
        int fromIndex, int toIndex) {
    for(int i = fromIndex; i < toIndex; i++) {
        if(mCurrentFrame.isFBComposed[i] && !mCurrentFrame.drop[i]) {
            if(intersectingUpdatingLayers(list, i+1, toIndex, i)) {
                return false;
            }
        }
    }
    return true;
}

/* Checks if the given layer at targetLayerIndex has any
 * intersection with the updating layers between
 * fromIndex and toIndex. Returns true if it finds an intersection */
bool MDPComp::intersectingUpdatingLayers(const hwc_display_contents_1_t* list,
        int fromIndex, int toIndex, int targetLayerIndex) {
    for(int i = fromIndex; i <= toIndex; i++) {
        if(!mCurrentFrame.isFBComposed[i]) {
            if(areLayersIntersecting(&list->hwLayers[i],
                        &list->hwLayers[targetLayerIndex])) {
                return true;
            }
        }
    }
    return false;
}

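/* Finds the largest batch of contiguous cached (FB) layers, extending a batch
 * past updating layers only when the cached layers can safely be pushed above
 * them in z-order. Reports the batch bounds and size through the out
 * parameters and returns the z-order to use for the framebuffer target. */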
int MDPComp::getBatch(hwc_display_contents_1_t* list,
        int& maxBatchStart, int& maxBatchEnd,
        int& maxBatchCount) {
    int i = 0;
    int fbZOrder =-1;
    int droppedLayerCt = 0;
    while (i < mCurrentFrame.layerCount) {
        int batchCount = 0;
        int batchStart = i;
        int batchEnd = i;
        /* Adjust batch Z order with the dropped layers so far */
        int fbZ = batchStart - droppedLayerCt;
        int firstZReverseIndex = -1;
        int updatingLayersAbove = 0;//Updating layer count in middle of batch
        while(i < mCurrentFrame.layerCount) {
            if(!mCurrentFrame.isFBComposed[i]) {
                if(!batchCount) {
                    i++;
                    break;
                }
                updatingLayersAbove++;
                i++;
                continue;
            } else {
                if(mCurrentFrame.drop[i]) {
                    i++;
                    droppedLayerCt++;
                    continue;
                } else if(updatingLayersAbove <= 0) {
                    batchCount++;
                    batchEnd = i;
                    i++;
                    continue;
                } else { //Layer is FBComposed, not a drop & updatingLayer > 0

                    // We already have a valid updating layer. If layer-i does
                    // not overlap with any updating layer between batch-start
                    // and i, then we can add layer i to the batch.
                    if(!intersectingUpdatingLayers(list, batchStart, i-1, i)) {
                        batchCount++;
                        batchEnd = i;
                        i++;
                        continue;
                    } else if(canPushBatchToTop(list, batchStart, i)) {
                        //If all the non-updating layers within this batch
                        //do not intersect with the updating layers above
                        //in z-order, then we can safely move the batch to a
                        //higher z-order. Increment fbZ as it is moving up.
                        if( firstZReverseIndex < 0) {
                            firstZReverseIndex = i;
                        }
                        batchCount++;
                        batchEnd = i;
                        fbZ += updatingLayersAbove;
                        i++;
                        updatingLayersAbove = 0;
                        continue;
                    } else {
                        //Both failed. Start the loop again from here.
                        if(firstZReverseIndex >= 0) {
                            i = firstZReverseIndex;
                        }
                        break;
                    }
                }
            }
        }
        if(batchCount > maxBatchCount) {
            maxBatchCount = batchCount;
            maxBatchStart = batchStart;
            maxBatchEnd = batchEnd;
            fbZOrder = fbZ;
        }
    }
    return fbZOrder;
}

bool MDPComp::markLayersForCaching(hwc_context_t* ctx,
        hwc_display_contents_1_t* list) {
    /* Idea is to keep as many non-updating(cached) layers in FB and
     * send rest of them through MDP. This is done in 2 steps.
     * 1. Find the maximum contiguous batch of non-updating layers.
     * 2. See if we can improve this batch size for caching by adding
     * opaque layers around the batch, if they don't have
     * any overlapping with the updating layers in between.
     * NEVER mark an updating layer for caching.
     * But cached ones can be marked for MDP */

    int maxBatchStart = -1;
    int maxBatchEnd = -1;
    int maxBatchCount = 0;
    int fbZ = -1;

    /* Nothing is cached. No batching needed */
    if(mCurrentFrame.fbCount == 0) {
        return true;
    }

    /* No MDP comp layers, try to use other comp modes */
    if(mCurrentFrame.mdpCount == 0) {
        return false;
    }

    fbZ = getBatch(list, maxBatchStart, maxBatchEnd, maxBatchCount);

    /* reset rest of the layers lying inside ROI for MDP comp */
    for(int i = 0; i < mCurrentFrame.layerCount; i++) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        if((i < maxBatchStart || i > maxBatchEnd) &&
                mCurrentFrame.isFBComposed[i]){
            if(!mCurrentFrame.drop[i]){
                //If an unsupported layer is being attempted to
                //be pulled out we should fail
                if(not isSupportedForMDPComp(ctx, layer)) {
                    return false;
                }
                mCurrentFrame.isFBComposed[i] = false;
            }
        }
    }

    // update the frame data
    mCurrentFrame.fbZ = fbZ;
    mCurrentFrame.fbCount = maxBatchCount;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount -
            mCurrentFrame.fbCount - mCurrentFrame.dropCount;

    ALOGD_IF(isDebug(),"%s: cached count: %d",__FUNCTION__,
            mCurrentFrame.fbCount);

    return true;
}

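/* Marks layers whose buffer handles are unchanged from the previous frame as
 * FB-composed (cached) and recomputes the MDP/FB/drop counts. */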
void MDPComp::updateLayerCache(hwc_context_t* ctx,
        hwc_display_contents_1_t* list) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    int fbCount = 0;

    for(int i = 0; i < numAppLayers; i++) {
        if (mCachedFrame.hnd[i] == list->hwLayers[i].handle) {
            if(!mCurrentFrame.drop[i])
                fbCount++;
            mCurrentFrame.isFBComposed[i] = true;
        } else {
            mCurrentFrame.isFBComposed[i] = false;
        }
    }

    mCurrentFrame.fbCount = fbCount;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount
            - mCurrentFrame.dropCount;

    ALOGD_IF(isDebug(),"%s: MDP count: %d FB count %d drop count: %d"
            ,__FUNCTION__, mCurrentFrame.mdpCount, mCurrentFrame.fbCount,
            mCurrentFrame.dropCount);
}

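/* Moves YUV layers onto MDP when they are doable (optionally only the secure
 * ones) and back into the FB batch when they are not, updating the counts. */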
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001186void MDPComp::updateYUV(hwc_context_t* ctx, hwc_display_contents_1_t* list,
1187 bool secureOnly) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001188 int nYuvCount = ctx->listStats[mDpy].yuvCount;
1189 for(int index = 0;index < nYuvCount; index++){
1190 int nYuvIndex = ctx->listStats[mDpy].yuvIndices[index];
1191 hwc_layer_1_t* layer = &list->hwLayers[nYuvIndex];
1192
1193 if(!isYUVDoable(ctx, layer)) {
1194 if(!mCurrentFrame.isFBComposed[nYuvIndex]) {
1195 mCurrentFrame.isFBComposed[nYuvIndex] = true;
1196 mCurrentFrame.fbCount++;
1197 }
1198 } else {
1199 if(mCurrentFrame.isFBComposed[nYuvIndex]) {
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001200 private_handle_t *hnd = (private_handle_t *)layer->handle;
1201 if(!secureOnly || isSecureBuffer(hnd)) {
1202 mCurrentFrame.isFBComposed[nYuvIndex] = false;
1203 mCurrentFrame.fbCount--;
1204 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001205 }
1206 }
1207 }
Saurabh Shahaa236822013-04-24 18:07:26 -07001208
1209 mCurrentFrame.mdpCount = mCurrentFrame.layerCount -
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001210 mCurrentFrame.fbCount - mCurrentFrame.dropCount;
1211 ALOGD_IF(isDebug(),"%s: fb count: %d",__FUNCTION__,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001212 mCurrentFrame.fbCount);
1213}
1214
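// Runs after a composition strategy is chosen: checks resources and HW
// limitations, configures the FB target if present, allocates and
// configures MDP pipes with z-orders, and validates the setup with overlay.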
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001215bool MDPComp::postHeuristicsHandling(hwc_context_t *ctx,
1216 hwc_display_contents_1_t* list) {
1217
1218 //Capability checks
1219 if(!resourceCheck(ctx, list)) {
1220 ALOGD_IF(isDebug(), "%s: resource check failed", __FUNCTION__);
1221 return false;
1222 }
1223
1224 //Limitations checks
1225 if(!hwLimitationsCheck(ctx, list)) {
1226 ALOGD_IF(isDebug(), "%s: HW limitations",__FUNCTION__);
1227 return false;
1228 }
1229
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001230 //Configure framebuffer first if applicable
1231 if(mCurrentFrame.fbZ >= 0) {
1232 if(!ctx->mFBUpdate[mDpy]->prepare(ctx, list, mCurrentFrame.fbZ)) {
1233 ALOGD_IF(isDebug(), "%s configure framebuffer failed",
1234 __FUNCTION__);
1235 return false;
1236 }
1237 }
1238
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001239 mCurrentFrame.map();
1240
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001241 if(!allocLayerPipes(ctx, list)) {
1242 ALOGD_IF(isDebug(), "%s: Unable to allocate MDP pipes", __FUNCTION__);
Saurabh Shahaa236822013-04-24 18:07:26 -07001243 return false;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001244 }
1245
1246 for (int index = 0, mdpNextZOrder = 0; index < mCurrentFrame.layerCount;
Saurabh Shahaa236822013-04-24 18:07:26 -07001247 index++) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001248 if(!mCurrentFrame.isFBComposed[index]) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001249 int mdpIndex = mCurrentFrame.layerToMDP[index];
1250 hwc_layer_1_t* layer = &list->hwLayers[index];
1251
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301252 //Leave fbZ for framebuffer. CACHE/GLES layers go here.
1253 if(mdpNextZOrder == mCurrentFrame.fbZ) {
1254 mdpNextZOrder++;
1255 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001256 MdpPipeInfo* cur_pipe = mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
1257 cur_pipe->zOrder = mdpNextZOrder++;
1258
radhakrishnac9a67412013-09-25 17:40:42 +05301259 private_handle_t *hnd = (private_handle_t *)layer->handle;
1260 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1261 if(configure4k2kYuv(ctx, layer,
1262 mCurrentFrame.mdpToLayer[mdpIndex])
1263 != 0 ){
1264 ALOGD_IF(isDebug(), "%s: Failed to configure split pipes \
1265 for layer %d",__FUNCTION__, index);
1266 return false;
1267 }
1268 else{
1269 mdpNextZOrder++;
1270 }
1271 continue;
1272 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001273 if(configure(ctx, layer, mCurrentFrame.mdpToLayer[mdpIndex]) != 0 ){
1274 ALOGD_IF(isDebug(), "%s: Failed to configure overlay for \
radhakrishnac9a67412013-09-25 17:40:42 +05301275 layer %d",__FUNCTION__, index);
Saurabh Shahaa236822013-04-24 18:07:26 -07001276 return false;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001277 }
Saurabh Shahaa236822013-04-24 18:07:26 -07001278 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001279 }
1280
Saurabh Shaha36be922013-12-16 18:18:39 -08001281 if(!ctx->mOverlay->validateAndSet(mDpy, ctx->dpyAttr[mDpy].fd)) {
1282 ALOGD_IF(isDebug(), "%s: Failed to validate and set overlay for dpy %d"
1283 ,__FUNCTION__, mDpy);
1284 return false;
1285 }
1286
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001287 setRedraw(ctx, list);
Saurabh Shahaa236822013-04-24 18:07:26 -07001288 return true;
1289}
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001290
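// Checks that the chosen frame fits within the per-mixer pipe budget and
// the available MDP bandwidth.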
Saurabh Shah173f4242013-11-20 09:50:12 -08001291bool MDPComp::resourceCheck(hwc_context_t *ctx,
1292 hwc_display_contents_1_t *list) {
1293 const bool fbUsed = mCurrentFrame.fbCount;
1294 if(mCurrentFrame.mdpCount > sMaxPipesPerMixer - fbUsed) {
1295 ALOGD_IF(isDebug(), "%s: Exceeds MAX_PIPES_PER_MIXER",__FUNCTION__);
1296 return false;
1297 }
1298
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001299 double size = calcMDPBytesRead(ctx, list);
Saurabh Shah173f4242013-11-20 09:50:12 -08001300 if(!bandwidthCheck(ctx, size)) {
1301 ALOGD_IF(isDebug(), "%s: Exceeds bandwidth",__FUNCTION__);
1302 return false;
1303 }
1304
1305 return true;
1306}
1307
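// Estimates, in GB, the data MDP reads to compose one frame: the scaled
// source crop of each MDP layer plus the full FB target when FB is used.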
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001308double MDPComp::calcMDPBytesRead(hwc_context_t *ctx,
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001309 hwc_display_contents_1_t* list) {
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001310 double size = 0;
1311 const double GIG = 1000000000.0;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001312
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001313 //Skip for targets where no device tree value for bw is supplied
1314 if(sMaxBw <= 0.0) {
1315 return 0.0;
1316 }
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001317
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001318 for (uint32_t i = 0; i < list->numHwLayers - 1; i++) {
1319 if(!mCurrentFrame.isFBComposed[i]) {
1320 hwc_layer_1_t* layer = &list->hwLayers[i];
1321 private_handle_t *hnd = (private_handle_t *)layer->handle;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001322 if (hnd) {
Saurabh Shah62e1d732013-09-17 10:44:05 -07001323 hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
Saurabh Shah90789162013-09-16 10:29:20 -07001324 hwc_rect_t dst = layer->displayFrame;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001325 float bpp = ((float)hnd->size) / (hnd->width * hnd->height);
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001326 size += (bpp * (crop.right - crop.left) *
1327 (crop.bottom - crop.top) *
1328 ctx->dpyAttr[mDpy].yres / (dst.bottom - dst.top)) /
1329 GIG;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001330 }
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001331 }
1332 }
1333
1334 if(mCurrentFrame.fbCount) {
1335 hwc_layer_1_t* layer = &list->hwLayers[list->numHwLayers - 1];
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001336 int tempw, temph;
1337 size += (getBufferSizeAndDimensions(
1338 layer->displayFrame.right - layer->displayFrame.left,
1339 layer->displayFrame.bottom - layer->displayFrame.top,
1340 HAL_PIXEL_FORMAT_RGBA_8888,
1341 tempw, temph)) / GIG;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001342 }
1343
1344 return size;
1345}
1346
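// Scales the per-frame read estimate by the panel refresh rate and checks
// it against the bandwidth not yet claimed (sMaxBw - sBwClaimed).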
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001347bool MDPComp::bandwidthCheck(hwc_context_t *ctx, const double& size) {
1348 //Skip for targets where no device tree value for bw is supplied
1349 if(sMaxBw <= 0.0) {
1350 return true;
1351 }
1352
1353 double panelRefRate =
1354 1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;
1355 if((size * panelRefRate) > (sMaxBw - sBwClaimed)) {
1356 return false;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001357 }
1358 return true;
1359}
1360
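// Target-specific restrictions that force a GPU fallback when violated.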
Prabhanjan Kandula21918db2013-11-26 15:51:58 +05301361bool MDPComp::hwLimitationsCheck(hwc_context_t* ctx,
1362 hwc_display_contents_1_t* list) {
1363
1364 //A-family hw limitation:
1365     //If a layer needs alpha scaling, MDP cannot support it.
1366 if(ctx->mMDP.version < qdutils::MDSS_V5) {
1367 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1368 if(!mCurrentFrame.isFBComposed[i] &&
1369 isAlphaScaled( &list->hwLayers[i])) {
1370 ALOGD_IF(isDebug(), "%s:frame needs alphaScaling",__FUNCTION__);
1371 return false;
1372 }
1373 }
1374 }
1375
1376 // On 8x26 & 8974 hw, we have a limitation of downscaling+blending.
1377     //If multiple layers require downscaling and they also overlap,
1378     //fall back to GPU since MDSS cannot handle it.
1379 if(qdutils::MDPVersion::getInstance().is8x74v2() ||
1380 qdutils::MDPVersion::getInstance().is8x26()) {
1381 for(int i = 0; i < mCurrentFrame.layerCount-1; ++i) {
1382 hwc_layer_1_t* botLayer = &list->hwLayers[i];
1383 if(!mCurrentFrame.isFBComposed[i] &&
1384 isDownscaleRequired(botLayer)) {
1385 //if layer-i is marked for MDP and needs downscaling
1386 //check if any MDP layer on top of i & overlaps with layer-i
1387 for(int j = i+1; j < mCurrentFrame.layerCount; ++j) {
1388 hwc_layer_1_t* topLayer = &list->hwLayers[j];
1389 if(!mCurrentFrame.isFBComposed[j] &&
1390 isDownscaleRequired(topLayer)) {
1391 hwc_rect_t r = getIntersection(botLayer->displayFrame,
1392 topLayer->displayFrame);
1393 if(isValidRect(r))
1394 return false;
1395 }
1396 }
1397 }
1398 }
1399 }
1400 return true;
1401}
1402
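// Per-frame strategy selection: resets state, handles the animation
// fallback, tries full-MDP then video-only composition, and records the
// bandwidth this display claims for the cycle.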
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001403int MDPComp::prepare(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001404 int ret = 0;
Saurabh Shahaa236822013-04-24 18:07:26 -07001405 const int numLayers = ctx->listStats[mDpy].numAppLayers;
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001406 MDPVersion& mdpVersion = qdutils::MDPVersion::getInstance();
Ramkumar Radhakrishnanc5893f12013-06-06 19:43:53 -07001407
Raj Kamal9ed3d6b2014-02-07 16:15:17 +05301408     //Do not cache the information for the next draw cycle.
1409 if(numLayers > MAX_NUM_APP_LAYERS or (!numLayers)) {
1410 ALOGI("%s: Unsupported layer count for mdp composition",
1411 __FUNCTION__);
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001412 mCachedFrame.reset();
1413 return -1;
1414 }
1415
Saurabh Shahb39f8152013-08-22 10:21:44 -07001416 //reset old data
1417 mCurrentFrame.reset(numLayers);
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001418 memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
1419 mCurrentFrame.dropCount = 0;
Prabhanjan Kandula088bd892013-07-02 23:47:13 +05301420
Ramkumar Radhakrishnana70981a2013-08-28 11:33:53 -07001421 // Detect the start of animation and fall back to GPU only once to cache
1422     // all the layers in FB and display FB content until animation completes.
1423 if(ctx->listStats[mDpy].isDisplayAnimating) {
1424 mCurrentFrame.needsRedraw = false;
1425 if(ctx->mAnimationState[mDpy] == ANIMATION_STOPPED) {
1426 mCurrentFrame.needsRedraw = true;
1427 ctx->mAnimationState[mDpy] = ANIMATION_STARTED;
1428 }
1429 setMDPCompLayerFlags(ctx, list);
1430 mCachedFrame.updateCounts(mCurrentFrame);
1431 ret = -1;
1432 return ret;
1433 } else {
1434 ctx->mAnimationState[mDpy] = ANIMATION_STOPPED;
1435 }
1436
Saurabh Shahb39f8152013-08-22 10:21:44 -07001437 //Hard conditions, if not met, cannot do MDP comp
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001438 if(isFrameDoable(ctx)) {
1439 generateROI(ctx, list);
Saurabh Shahb39f8152013-08-22 10:21:44 -07001440
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001441 //Convert from kbps to gbps
1442 sMaxBw = mdpVersion.getHighBw() / 1000000.0;
1443 if (ctx->mExtDisplay->isConnected() ||
1444 ctx->mMDP.panel != MIPI_CMD_PANEL) {
1445 sMaxBw = mdpVersion.getLowBw() / 1000000.0;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001446 }
1447
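        //Try full MDP composition first, then video-only composition; on
        //failure fall back to GPU for the whole frame.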
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001448 if(tryFullFrame(ctx, list) || tryVideoOnly(ctx, list)) {
1449 setMDPCompLayerFlags(ctx, list);
1450 } else {
1451 reset(ctx);
1452 memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
1453 mCurrentFrame.dropCount = 0;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001454 ret = -1;
Saurabh Shahb39f8152013-08-22 10:21:44 -07001455 }
1456 } else {
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001457 ALOGD_IF( isDebug(),"%s: MDP Comp not possible for this frame",
1458 __FUNCTION__);
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001459 ret = -1;
Saurabh Shahb39f8152013-08-22 10:21:44 -07001460 }
Saurabh Shahb39f8152013-08-22 10:21:44 -07001461
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001462 if(isDebug()) {
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001463 ALOGD("GEOMETRY change: %d",
1464 (list->flags & HWC_GEOMETRY_CHANGED));
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001465 android::String8 sDump("");
1466 dump(sDump);
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001467 ALOGD("%s",sDump.string());
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001468 }
1469
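    //Cache this frame's layer handles and counts, and add this display's
    //estimated bandwidth to the running claimed total.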
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001470 mCachedFrame.cacheAll(list);
1471 mCachedFrame.updateCounts(mCurrentFrame);
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001472 double panelRefRate =
1473 1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;
1474 sBwClaimed += calcMDPBytesRead(ctx, list) * panelRefRate;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001475 return ret;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001476}
1477
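// Reserves two VG pipes on the default mixer so a 4kx2k YUV layer can be
// programmed as left and right halves.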
Arun Kumar K.R2aa44c62014-01-21 23:08:28 -08001478bool MDPComp::allocSplitVGPipesfor4k2k(hwc_context_t *ctx, int index) {
radhakrishnac9a67412013-09-25 17:40:42 +05301479
1480 bool bRet = true;
radhakrishnac9a67412013-09-25 17:40:42 +05301481 int mdpIndex = mCurrentFrame.layerToMDP[index];
1482 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
1483 info.pipeInfo = new MdpYUVPipeInfo;
1484 info.rot = NULL;
1485 MdpYUVPipeInfo& pipe_info = *(MdpYUVPipeInfo*)info.pipeInfo;
1486 ePipeType type = MDPCOMP_OV_VG;
1487
1488 pipe_info.lIndex = ovutils::OV_INVALID;
1489 pipe_info.rIndex = ovutils::OV_INVALID;
1490
1491 pipe_info.lIndex = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
1492 if(pipe_info.lIndex == ovutils::OV_INVALID){
1493 bRet = false;
1494 ALOGD_IF(isDebug(),"%s: allocating first VG pipe failed",
1495 __FUNCTION__);
1496 }
1497 pipe_info.rIndex = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
1498 if(pipe_info.rIndex == ovutils::OV_INVALID){
1499 bRet = false;
1500 ALOGD_IF(isDebug(),"%s: allocating second VG pipe failed",
1501 __FUNCTION__);
1502 }
1503 return bRet;
1504}
Arun Kumar K.R2aa44c62014-01-21 23:08:28 -08001505//=============MDPCompNonSplit==================================================
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001506
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001507void MDPCompNonSplit::adjustForSourceSplit(hwc_context_t *ctx,
Arun Kumar K.R2aa44c62014-01-21 23:08:28 -08001508 hwc_display_contents_1_t*) {
radhakrishnac9a67412013-09-25 17:40:42 +05301509     //Since we split the 4kx2k yuv layer and program it to 2 VG pipes
1510     //(if available), increase the mdp count accordingly
1511 mCurrentFrame.mdpCount += ctx->listStats[mDpy].yuv4k2kCount;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001512
1513     //If a 4k2k yuv layer split is possible and fbZ sits above the
1514     //4k2k layer, increment the fb zorder by 1, since we split the
1515     //4k2k layer and assign the next zorder to the right half
1516     //of the layer
1517 if(mCurrentFrame.fbZ >= 0) {
1518 int n4k2kYuvCount = ctx->listStats[mDpy].yuv4k2kCount;
1519 for(int index = 0; index < n4k2kYuvCount; index++){
1520 int n4k2kYuvIndex =
1521 ctx->listStats[mDpy].yuv4k2kIndices[index];
1522 if(mCurrentFrame.fbZ > n4k2kYuvIndex){
1523 mCurrentFrame.fbZ += 1;
1524 }
1525 }
1526 }
radhakrishnac9a67412013-09-25 17:40:42 +05301527}
1528
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001529/*
1530 * Configures pipe(s) for MDP composition
1531 */
Saurabh Shah88e4d272013-09-03 13:31:29 -07001532int MDPCompNonSplit::configure(hwc_context_t *ctx, hwc_layer_1_t *layer,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001533 PipeLayerPair& PipeLayerPair) {
Saurabh Shah88e4d272013-09-03 13:31:29 -07001534 MdpPipeInfoNonSplit& mdp_info =
1535 *(static_cast<MdpPipeInfoNonSplit*>(PipeLayerPair.pipeInfo));
Saurabh Shahacf10202013-02-26 10:15:15 -08001536 eMdpFlags mdpFlags = OV_MDP_BACKEND_COMPOSITION;
1537 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1538 eIsFg isFg = IS_FG_OFF;
1539 eDest dest = mdp_info.index;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001540
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001541 ALOGD_IF(isDebug(),"%s: configuring: layer: %p z_order: %d dest_pipe: %d",
1542 __FUNCTION__, layer, zOrder, dest);
1543
Saurabh Shah88e4d272013-09-03 13:31:29 -07001544 return configureNonSplit(ctx, layer, mDpy, mdpFlags, zOrder, isFg, dest,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001545 &PipeLayerPair.rot);
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001546}
1547
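// Chooses a pipe type for each MDP layer: VG for YUV buffers, RGB for
// scaled layers on large 8x26 panels, DMA for unscaled layers on MDSS,
// otherwise any available pipe.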
Saurabh Shah88e4d272013-09-03 13:31:29 -07001548bool MDPCompNonSplit::allocLayerPipes(hwc_context_t *ctx,
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001549 hwc_display_contents_1_t* list) {
1550 for(int index = 0; index < mCurrentFrame.layerCount; index++) {
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001551
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001552 if(mCurrentFrame.isFBComposed[index]) continue;
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001553
Jeykumar Sankarancf537002013-01-21 21:19:15 -08001554 hwc_layer_1_t* layer = &list->hwLayers[index];
1555 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301556 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
Arun Kumar K.R2aa44c62014-01-21 23:08:28 -08001557 if(allocSplitVGPipesfor4k2k(ctx, index)){
radhakrishnac9a67412013-09-25 17:40:42 +05301558 continue;
1559 }
1560 }
1561
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001562 int mdpIndex = mCurrentFrame.layerToMDP[index];
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001563 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
Saurabh Shah88e4d272013-09-03 13:31:29 -07001564 info.pipeInfo = new MdpPipeInfoNonSplit;
Saurabh Shahacf10202013-02-26 10:15:15 -08001565 info.rot = NULL;
Saurabh Shah88e4d272013-09-03 13:31:29 -07001566 MdpPipeInfoNonSplit& pipe_info = *(MdpPipeInfoNonSplit*)info.pipeInfo;
Jeykumar Sankarana37fdbf2013-03-06 18:59:28 -08001567 ePipeType type = MDPCOMP_OV_ANY;
1568
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001569 if(isYuvBuffer(hnd)) {
1570 type = MDPCOMP_OV_VG;
Prabhanjan Kandula47191dc2014-01-22 23:01:45 +05301571 } else if(qdutils::MDPVersion::getInstance().is8x26() &&
1572 (ctx->dpyAttr[HWC_DISPLAY_PRIMARY].xres > 1024)) {
1573 if(qhwc::needsScaling(layer))
1574 type = MDPCOMP_OV_RGB;
Prabhanjan Kandula21918db2013-11-26 15:51:58 +05301575 } else if(!qhwc::needsScaling(layer)
Saurabh Shah85234ec2013-04-12 17:09:00 -07001576 && Overlay::getDMAMode() != Overlay::DMA_BLOCK_MODE
1577 && ctx->mMDP.version >= qdutils::MDSS_V5) {
Jeykumar Sankarana37fdbf2013-03-06 18:59:28 -08001578 type = MDPCOMP_OV_DMA;
1579 }
1580
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001581 pipe_info.index = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001582 if(pipe_info.index == ovutils::OV_INVALID) {
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001583 ALOGD_IF(isDebug(), "%s: Unable to get pipe type = %d",
1584 __FUNCTION__, (int) type);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001585 return false;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001586 }
1587 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001588 return true;
1589}
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001590
radhakrishnac9a67412013-09-25 17:40:42 +05301591int MDPCompNonSplit::configure4k2kYuv(hwc_context_t *ctx, hwc_layer_1_t *layer,
1592 PipeLayerPair& PipeLayerPair) {
1593 MdpYUVPipeInfo& mdp_info =
1594 *(static_cast<MdpYUVPipeInfo*>(PipeLayerPair.pipeInfo));
1595 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1596 eIsFg isFg = IS_FG_OFF;
1597 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1598 eDest lDest = mdp_info.lIndex;
1599 eDest rDest = mdp_info.rIndex;
1600
1601 return configureSourceSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg,
1602 lDest, rDest, &PipeLayerPair.rot);
1603}
1604
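// Queues each MDP layer's buffer to its assigned pipe, routing through the
// rotator when one was configured; split 4kx2k YUV layers are queued to
// both of their VG pipes.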
Saurabh Shah88e4d272013-09-03 13:31:29 -07001605bool MDPCompNonSplit::draw(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001606
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001607 if(!isEnabled()) {
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001608 ALOGD_IF(isDebug(),"%s: MDP Comp not configured", __FUNCTION__);
1609 return true;
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -08001610 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001611
1612 if(!ctx || !list) {
1613         ALOGE("%s: invalid context or list",__FUNCTION__);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001614 return false;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001615 }
1616
Prabhanjan Kandula08222fc2013-07-10 17:20:59 +05301617 if(ctx->listStats[mDpy].numAppLayers > MAX_NUM_APP_LAYERS) {
1618 ALOGD_IF(isDebug(),"%s: Exceeding max layer count", __FUNCTION__);
1619 return true;
1620 }
1621
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001622 /* reset Invalidator */
Saurabh Shah2d998a92013-05-14 17:55:58 -07001623 if(idleInvalidator && !sIdleFallBack && mCurrentFrame.mdpCount)
Saurabh Shahb2117fe2014-01-23 18:39:01 -08001624 idleInvalidator->handleUpdateEvent();
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001625
1626 overlay::Overlay& ov = *ctx->mOverlay;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001627 LayerProp *layerProp = ctx->layerProp[mDpy];
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001628
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001629 int numHwLayers = ctx->listStats[mDpy].numAppLayers;
1630 for(int i = 0; i < numHwLayers && mCurrentFrame.mdpCount; i++ )
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001631 {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001632 if(mCurrentFrame.isFBComposed[i]) continue;
1633
Naseer Ahmed5b6708a2012-08-02 13:46:08 -07001634 hwc_layer_1_t *layer = &list->hwLayers[i];
Saurabh Shahacf10202013-02-26 10:15:15 -08001635 private_handle_t *hnd = (private_handle_t *)layer->handle;
1636 if(!hnd) {
Sushil Chauhan897a9c32013-07-18 11:09:55 -07001637 if (!(layer->flags & HWC_COLOR_FILL)) {
1638 ALOGE("%s handle null", __FUNCTION__);
1639 return false;
1640 }
1641 // No PLAY for Color layer
1642 layerProp[i].mFlags &= ~HWC_MDPCOMP;
1643 continue;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001644 }
1645
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001646 int mdpIndex = mCurrentFrame.layerToMDP[i];
1647
radhakrishnac9a67412013-09-25 17:40:42 +05301648 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit)
1649 {
1650 MdpYUVPipeInfo& pipe_info =
1651 *(MdpYUVPipeInfo*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
1652 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
1653 ovutils::eDest indexL = pipe_info.lIndex;
1654 ovutils::eDest indexR = pipe_info.rIndex;
1655 int fd = hnd->fd;
1656 uint32_t offset = hnd->offset;
1657 if(rot) {
1658 rot->queueBuffer(fd, offset);
1659 fd = rot->getDstMemId();
1660 offset = rot->getDstOffset();
1661 }
1662 if(indexL != ovutils::OV_INVALID) {
1663 ovutils::eDest destL = (ovutils::eDest)indexL;
1664 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1665 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
1666 if (!ov.queueBuffer(fd, offset, destL)) {
1667 ALOGE("%s: queueBuffer failed for display:%d",
1668 __FUNCTION__, mDpy);
1669 return false;
1670 }
1671 }
1672
1673 if(indexR != ovutils::OV_INVALID) {
1674 ovutils::eDest destR = (ovutils::eDest)indexR;
1675 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1676 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
1677 if (!ov.queueBuffer(fd, offset, destR)) {
1678 ALOGE("%s: queueBuffer failed for display:%d",
1679 __FUNCTION__, mDpy);
1680 return false;
1681 }
1682 }
1683 }
1684 else{
1685 MdpPipeInfoNonSplit& pipe_info =
Saurabh Shah88e4d272013-09-03 13:31:29 -07001686 *(MdpPipeInfoNonSplit*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
radhakrishnac9a67412013-09-25 17:40:42 +05301687 ovutils::eDest dest = pipe_info.index;
1688 if(dest == ovutils::OV_INVALID) {
1689 ALOGE("%s: Invalid pipe index (%d)", __FUNCTION__, dest);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001690 return false;
radhakrishnac9a67412013-09-25 17:40:42 +05301691 }
Saurabh Shahacf10202013-02-26 10:15:15 -08001692
radhakrishnac9a67412013-09-25 17:40:42 +05301693 if(!(layerProp[i].mFlags & HWC_MDPCOMP)) {
1694 continue;
1695 }
1696
1697 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1698 using pipe: %d", __FUNCTION__, layer,
1699 hnd, dest );
1700
1701 int fd = hnd->fd;
1702 uint32_t offset = hnd->offset;
1703
1704 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
1705 if(rot) {
1706 if(!rot->queueBuffer(fd, offset))
1707 return false;
1708 fd = rot->getDstMemId();
1709 offset = rot->getDstOffset();
1710 }
1711
1712 if (!ov.queueBuffer(fd, offset, dest)) {
1713 ALOGE("%s: queueBuffer failed for display:%d ",
1714 __FUNCTION__, mDpy);
1715 return false;
1716 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001717 }
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001718
1719 layerProp[i].mFlags &= ~HWC_MDPCOMP;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001720 }
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001721 return true;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001722}
1723
Saurabh Shah88e4d272013-09-03 13:31:29 -07001724//=============MDPCompSplit===================================================
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001725
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001726void MDPCompSplit::adjustForSourceSplit(hwc_context_t *ctx,
radhakrishnac9a67412013-09-25 17:40:42 +05301727 hwc_display_contents_1_t* list){
1728 //if 4kx2k yuv layer is totally present in either in left half
1729 //or right half then try splitting the yuv layer to avoid decimation
1730 int n4k2kYuvCount = ctx->listStats[mDpy].yuv4k2kCount;
1731 const int lSplit = getLeftSplit(ctx, mDpy);
1732 for(int index = 0; index < n4k2kYuvCount; index++){
1733 int n4k2kYuvIndex = ctx->listStats[mDpy].yuv4k2kIndices[index];
1734 hwc_layer_1_t* layer = &list->hwLayers[n4k2kYuvIndex];
1735 hwc_rect_t dst = layer->displayFrame;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001736 if((dst.left > lSplit) || (dst.right < lSplit)) {
radhakrishnac9a67412013-09-25 17:40:42 +05301737 mCurrentFrame.mdpCount += 1;
1738 }
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001739 if(mCurrentFrame.fbZ > n4k2kYuvIndex){
1740 mCurrentFrame.fbZ += 1;
1741 }
radhakrishnac9a67412013-09-25 17:40:42 +05301742 }
1743}
1744
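// Acquires a left and/or right mixer pipe depending on which side(s) of
// the panel split the layer's destination rect covers.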
Saurabh Shah88e4d272013-09-03 13:31:29 -07001745bool MDPCompSplit::acquireMDPPipes(hwc_context_t *ctx, hwc_layer_1_t* layer,
1746 MdpPipeInfoSplit& pipe_info,
Saurabh Shah67a38c32013-06-10 16:23:15 -07001747 ePipeType type) {
Saurabh Shah07a8ca82013-08-06 18:45:42 -07001748 const int lSplit = getLeftSplit(ctx, mDpy);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001749
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001750 hwc_rect_t dst = layer->displayFrame;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001751 pipe_info.lIndex = ovutils::OV_INVALID;
1752 pipe_info.rIndex = ovutils::OV_INVALID;
1753
1754 if (dst.left < lSplit) {
1755 pipe_info.lIndex = getMdpPipe(ctx, type, Overlay::MIXER_LEFT);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001756 if(pipe_info.lIndex == ovutils::OV_INVALID)
1757 return false;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001758 }
1759
1760 if(dst.right > lSplit) {
1761 pipe_info.rIndex = getMdpPipe(ctx, type, Overlay::MIXER_RIGHT);
1762 if(pipe_info.rIndex == ovutils::OV_INVALID)
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001763 return false;
1764 }
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001765
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001766 return true;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001767}
1768
Saurabh Shah88e4d272013-09-03 13:31:29 -07001769bool MDPCompSplit::allocLayerPipes(hwc_context_t *ctx,
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001770 hwc_display_contents_1_t* list) {
1771 for(int index = 0 ; index < mCurrentFrame.layerCount; index++) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001772
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001773 if(mCurrentFrame.isFBComposed[index]) continue;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001774
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001775 hwc_layer_1_t* layer = &list->hwLayers[index];
1776 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301777 hwc_rect_t dst = layer->displayFrame;
1778 const int lSplit = getLeftSplit(ctx, mDpy);
1779 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1780 if((dst.left > lSplit)||(dst.right < lSplit)){
Arun Kumar K.R2aa44c62014-01-21 23:08:28 -08001781 if(allocSplitVGPipesfor4k2k(ctx, index)){
radhakrishnac9a67412013-09-25 17:40:42 +05301782 continue;
1783 }
1784 }
1785 }
Saurabh Shah0d65dbe2013-06-06 18:33:16 -07001786 int mdpIndex = mCurrentFrame.layerToMDP[index];
1787 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
Saurabh Shah88e4d272013-09-03 13:31:29 -07001788 info.pipeInfo = new MdpPipeInfoSplit;
Saurabh Shah9e3adb22013-03-26 11:16:27 -07001789 info.rot = NULL;
Saurabh Shah88e4d272013-09-03 13:31:29 -07001790 MdpPipeInfoSplit& pipe_info = *(MdpPipeInfoSplit*)info.pipeInfo;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001791 ePipeType type = MDPCOMP_OV_ANY;
1792
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001793 if(isYuvBuffer(hnd)) {
1794 type = MDPCOMP_OV_VG;
Sushil Chauhan15a2ea62013-09-04 18:28:36 -07001795 } else if(!qhwc::needsScalingWithSplit(ctx, layer, mDpy)
Saurabh Shah85234ec2013-04-12 17:09:00 -07001796 && Overlay::getDMAMode() != Overlay::DMA_BLOCK_MODE
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001797 && ctx->mMDP.version >= qdutils::MDSS_V5) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001798 type = MDPCOMP_OV_DMA;
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001799 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001800
1801 if(!acquireMDPPipes(ctx, layer, pipe_info, type)) {
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001802 ALOGD_IF(isDebug(), "%s: Unable to get pipe for type = %d",
1803 __FUNCTION__, (int) type);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001804 return false;
1805 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001806 }
1807 return true;
1808}
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001809
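// If the 4k2k YUV layer lies entirely within one half of the split display,
// use the source-split path (two pipes on one mixer); otherwise use the
// regular split configure().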
radhakrishnac9a67412013-09-25 17:40:42 +05301810int MDPCompSplit::configure4k2kYuv(hwc_context_t *ctx, hwc_layer_1_t *layer,
1811 PipeLayerPair& PipeLayerPair) {
1812 const int lSplit = getLeftSplit(ctx, mDpy);
1813 hwc_rect_t dst = layer->displayFrame;
1814 if((dst.left > lSplit)||(dst.right < lSplit)){
1815 MdpYUVPipeInfo& mdp_info =
1816 *(static_cast<MdpYUVPipeInfo*>(PipeLayerPair.pipeInfo));
1817 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1818 eIsFg isFg = IS_FG_OFF;
1819 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1820 eDest lDest = mdp_info.lIndex;
1821 eDest rDest = mdp_info.rIndex;
1822
1823 return configureSourceSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg,
1824 lDest, rDest, &PipeLayerPair.rot);
1825 }
1826 else{
1827 return configure(ctx, layer, PipeLayerPair);
1828 }
1829}
1830
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001831/*
1832 * Configures pipe(s) for MDP composition
1833 */
Saurabh Shah88e4d272013-09-03 13:31:29 -07001834int MDPCompSplit::configure(hwc_context_t *ctx, hwc_layer_1_t *layer,
Saurabh Shah67a38c32013-06-10 16:23:15 -07001835 PipeLayerPair& PipeLayerPair) {
Saurabh Shah88e4d272013-09-03 13:31:29 -07001836 MdpPipeInfoSplit& mdp_info =
1837 *(static_cast<MdpPipeInfoSplit*>(PipeLayerPair.pipeInfo));
Saurabh Shahacf10202013-02-26 10:15:15 -08001838 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1839 eIsFg isFg = IS_FG_OFF;
1840 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1841 eDest lDest = mdp_info.lIndex;
1842 eDest rDest = mdp_info.rIndex;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001843
1844 ALOGD_IF(isDebug(),"%s: configuring: layer: %p z_order: %d dest_pipeL: %d"
1845              " dest_pipeR: %d",__FUNCTION__, layer, zOrder, lDest, rDest);
1846
Saurabh Shah88e4d272013-09-03 13:31:29 -07001847 return configureSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg, lDest,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001848 rDest, &PipeLayerPair.rot);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001849}
1850
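// Queues each MDP layer to its left and/or right mixer pipes, routing
// through the rotator and the assertive display (AD) output buffer when
// those are active.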
Saurabh Shah88e4d272013-09-03 13:31:29 -07001851bool MDPCompSplit::draw(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001852
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001853 if(!isEnabled()) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001854 ALOGD_IF(isDebug(),"%s: MDP Comp not configured", __FUNCTION__);
1855 return true;
1856 }
1857
1858 if(!ctx || !list) {
1859         ALOGE("%s: invalid context or list",__FUNCTION__);
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001860 return false;
1861 }
1862
Prabhanjan Kandula08222fc2013-07-10 17:20:59 +05301863 if(ctx->listStats[mDpy].numAppLayers > MAX_NUM_APP_LAYERS) {
1864 ALOGD_IF(isDebug(),"%s: Exceeding max layer count", __FUNCTION__);
1865 return true;
1866 }
1867
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001868 /* reset Invalidator */
Saurabh Shah2d998a92013-05-14 17:55:58 -07001869 if(idleInvalidator && !sIdleFallBack && mCurrentFrame.mdpCount)
Saurabh Shahb2117fe2014-01-23 18:39:01 -08001870 idleInvalidator->handleUpdateEvent();
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001871
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001872 overlay::Overlay& ov = *ctx->mOverlay;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001873 LayerProp *layerProp = ctx->layerProp[mDpy];
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001874
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001875 int numHwLayers = ctx->listStats[mDpy].numAppLayers;
1876 for(int i = 0; i < numHwLayers && mCurrentFrame.mdpCount; i++ )
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001877 {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001878 if(mCurrentFrame.isFBComposed[i]) continue;
1879
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001880 hwc_layer_1_t *layer = &list->hwLayers[i];
Saurabh Shahacf10202013-02-26 10:15:15 -08001881 private_handle_t *hnd = (private_handle_t *)layer->handle;
1882 if(!hnd) {
1883 ALOGE("%s handle null", __FUNCTION__);
1884 return false;
1885 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001886
1887 if(!(layerProp[i].mFlags & HWC_MDPCOMP)) {
1888 continue;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001889 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001890
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001891 int mdpIndex = mCurrentFrame.layerToMDP[i];
1892
radhakrishnac9a67412013-09-25 17:40:42 +05301893 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit)
1894 {
1895 MdpYUVPipeInfo& pipe_info =
1896 *(MdpYUVPipeInfo*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
1897 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
1898 ovutils::eDest indexL = pipe_info.lIndex;
1899 ovutils::eDest indexR = pipe_info.rIndex;
1900 int fd = hnd->fd;
1901 uint32_t offset = hnd->offset;
1902 if(rot) {
1903 rot->queueBuffer(fd, offset);
1904 fd = rot->getDstMemId();
1905 offset = rot->getDstOffset();
1906 }
1907 if(indexL != ovutils::OV_INVALID) {
1908 ovutils::eDest destL = (ovutils::eDest)indexL;
1909 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1910 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
1911 if (!ov.queueBuffer(fd, offset, destL)) {
1912 ALOGE("%s: queueBuffer failed for display:%d",
1913 __FUNCTION__, mDpy);
1914 return false;
1915 }
1916 }
Saurabh Shahacf10202013-02-26 10:15:15 -08001917
radhakrishnac9a67412013-09-25 17:40:42 +05301918 if(indexR != ovutils::OV_INVALID) {
1919 ovutils::eDest destR = (ovutils::eDest)indexR;
1920 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1921 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
1922 if (!ov.queueBuffer(fd, offset, destR)) {
1923 ALOGE("%s: queueBuffer failed for display:%d",
1924 __FUNCTION__, mDpy);
1925 return false;
1926 }
Saurabh Shaha9da08f2013-07-03 13:27:53 -07001927 }
1928 }
radhakrishnac9a67412013-09-25 17:40:42 +05301929 else{
1930 MdpPipeInfoSplit& pipe_info =
1931 *(MdpPipeInfoSplit*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
1932 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
Saurabh Shaha9da08f2013-07-03 13:27:53 -07001933
radhakrishnac9a67412013-09-25 17:40:42 +05301934 ovutils::eDest indexL = pipe_info.lIndex;
1935 ovutils::eDest indexR = pipe_info.rIndex;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001936
radhakrishnac9a67412013-09-25 17:40:42 +05301937 int fd = hnd->fd;
1938 int offset = hnd->offset;
1939
1940 if(ctx->mAD->isModeOn()) {
1941 if(ctx->mAD->draw(ctx, fd, offset)) {
Arun Kumar K.R2aa44c62014-01-21 23:08:28 -08001942 fd = ctx->mAD->getDstFd();
1943 offset = ctx->mAD->getDstOffset();
radhakrishnac9a67412013-09-25 17:40:42 +05301944 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001945 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001946
radhakrishnac9a67412013-09-25 17:40:42 +05301947 if(rot) {
1948 rot->queueBuffer(fd, offset);
1949 fd = rot->getDstMemId();
1950 offset = rot->getDstOffset();
1951 }
1952
1953 //************* play left mixer **********
1954 if(indexL != ovutils::OV_INVALID) {
1955 ovutils::eDest destL = (ovutils::eDest)indexL;
1956 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1957 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
1958 if (!ov.queueBuffer(fd, offset, destL)) {
1959 ALOGE("%s: queueBuffer failed for left mixer",
1960 __FUNCTION__);
1961 return false;
1962 }
1963 }
1964
1965 //************* play right mixer **********
1966 if(indexR != ovutils::OV_INVALID) {
1967 ovutils::eDest destR = (ovutils::eDest)indexR;
1968 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1969 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
1970 if (!ov.queueBuffer(fd, offset, destR)) {
1971 ALOGE("%s: queueBuffer failed for right mixer",
1972 __FUNCTION__);
1973 return false;
1974 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001975 }
1976 }
Saurabh Shahacf10202013-02-26 10:15:15 -08001977
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001978 layerProp[i].mFlags &= ~HWC_MDPCOMP;
1979 }
Saurabh Shahacf10202013-02-26 10:15:15 -08001980
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001981 return true;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001982}
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001983}; //namespace
1984