/*
 * Copyright (C) 2012-2013, The Linux Foundation. All rights reserved.
 * Not a Contribution, Apache license notifications and license are retained
 * for attribution purposes only.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <math.h>
#include "hwc_mdpcomp.h"
#include <sys/ioctl.h>
#include "external.h"
#include "virtual.h"
#include "qdMetaData.h"
#include "mdp_version.h"
#include "hwc_fbupdate.h"
#include "hwc_ad.h"
#include <overlayRotator.h>

using namespace overlay;
using namespace qdutils;
using namespace overlay::utils;
namespace ovutils = overlay::utils;

namespace qhwc {

//==============MDPComp========================================================

IdleInvalidator *MDPComp::idleInvalidator = NULL;
bool MDPComp::sIdleFallBack = false;
bool MDPComp::sDebugLogs = false;
bool MDPComp::sEnabled = false;
bool MDPComp::sEnableMixedMode = true;
bool MDPComp::sEnablePartialFrameUpdate = false;
int MDPComp::sMaxPipesPerMixer = MAX_PIPES_PER_MIXER;
double MDPComp::sMaxBw = 0.0;
double MDPComp::sBwClaimed = 0.0;
bool MDPComp::sEnable4k2kYUVSplit = false;

MDPComp* MDPComp::getObject(hwc_context_t *ctx, const int& dpy) {
    if(isDisplaySplit(ctx, dpy)) {
        return new MDPCompSplit(dpy);
    }
    return new MDPCompNonSplit(dpy);
}

MDPComp::MDPComp(int dpy):mDpy(dpy){};

void MDPComp::dump(android::String8& buf)
{
    if(mCurrentFrame.layerCount > MAX_NUM_APP_LAYERS)
        return;

    dumpsys_log(buf,"HWC Map for Dpy: %s \n",
                (mDpy == 0) ? "\"PRIMARY\"" :
                (mDpy == 1) ? "\"EXTERNAL\"" : "\"VIRTUAL\"");
    dumpsys_log(buf,"CURR_FRAME: layerCount:%2d mdpCount:%2d "
                "fbCount:%2d \n", mCurrentFrame.layerCount,
                mCurrentFrame.mdpCount, mCurrentFrame.fbCount);
    dumpsys_log(buf,"needsFBRedraw:%3s pipesUsed:%2d MaxPipesPerMixer: %d \n",
                (mCurrentFrame.needsRedraw? "YES" : "NO"),
                mCurrentFrame.mdpCount, sMaxPipesPerMixer);
    dumpsys_log(buf," --------------------------------------------- \n");
    dumpsys_log(buf," listIdx | cached? | mdpIndex | comptype | Z \n");
    dumpsys_log(buf," --------------------------------------------- \n");
    for(int index = 0; index < mCurrentFrame.layerCount; index++ )
        dumpsys_log(buf," %7d | %7s | %8d | %9s | %2d \n",
                    index,
                    (mCurrentFrame.isFBComposed[index] ? "YES" : "NO"),
                    mCurrentFrame.layerToMDP[index],
                    (mCurrentFrame.isFBComposed[index] ?
                    (mCurrentFrame.drop[index] ? "DROP" :
                    (mCurrentFrame.needsRedraw ? "GLES" : "CACHE")) : "MDP"),
                    (mCurrentFrame.isFBComposed[index] ? mCurrentFrame.fbZ :
    mCurrentFrame.mdpToLayer[mCurrentFrame.layerToMDP[index]].pipeInfo->zOrder));
    dumpsys_log(buf,"\n");
}

bool MDPComp::init(hwc_context_t *ctx) {

    if(!ctx) {
        ALOGE("%s: Invalid hwc context!!",__FUNCTION__);
        return false;
    }

    char property[PROPERTY_VALUE_MAX];

    sEnabled = false;
    if((property_get("persist.hwc.mdpcomp.enable", property, NULL) > 0) &&
       (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
        (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
        sEnabled = true;
    }

    sEnableMixedMode = true;
    if((property_get("debug.mdpcomp.mixedmode.disable", property, NULL) > 0) &&
       (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
        (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
        sEnableMixedMode = false;
    }

    if(property_get("debug.mdpcomp.logs", property, NULL) > 0) {
        if(atoi(property) != 0)
            sDebugLogs = true;
    }

    if(property_get("persist.hwc.partialupdate.enable", property, NULL) > 0) {
        if((atoi(property) != 0) && ctx->mMDP.panel == MIPI_CMD_PANEL &&
           qdutils::MDPVersion::getInstance().is8x74v2())
            sEnablePartialFrameUpdate = true;
    }
    ALOGE_IF(isDebug(), "%s: Partial Update applicable?: %d",__FUNCTION__,
             sEnablePartialFrameUpdate);

    sMaxPipesPerMixer = MAX_PIPES_PER_MIXER;
    if(property_get("debug.mdpcomp.maxpermixer", property, "-1") > 0) {
        int val = atoi(property);
        if(val >= 0)
            sMaxPipesPerMixer = min(val, MAX_PIPES_PER_MIXER);
    }

    if(ctx->mMDP.panel != MIPI_CMD_PANEL) {
        // Idle invalidation is not necessary on command mode panels
        long idle_timeout = DEFAULT_IDLE_TIME;
        if(property_get("debug.mdpcomp.idletime", property, NULL) > 0) {
            if(atoi(property) != 0)
                idle_timeout = atoi(property);
        }

        //create Idle Invalidator only when not disabled through property
        if(idle_timeout != -1)
            idleInvalidator = IdleInvalidator::getInstance();

        if(idleInvalidator == NULL) {
            ALOGE("%s: failed to instantiate idleInvalidator object",
                  __FUNCTION__);
        } else {
            idleInvalidator->init(timeout_handler, ctx, idle_timeout);
        }
    }

    if((property_get("debug.mdpcomp.4k2kSplit", property, "0") > 0) &&
       (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
        (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
        sEnable4k2kYUVSplit = true;
    }
    return true;
}

void MDPComp::reset(hwc_context_t *ctx) {
    const int numLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numLayers);
    ctx->mOverlay->clear(mDpy);
    ctx->mLayerRotMap[mDpy]->clear();
}

void MDPComp::timeout_handler(void *udata) {
    struct hwc_context_t* ctx = (struct hwc_context_t*)(udata);

    if(!ctx) {
        ALOGE("%s: received empty data in timer callback", __FUNCTION__);
        return;
    }

    if(!ctx->proc) {
        ALOGE("%s: HWC proc not registered", __FUNCTION__);
        return;
    }
    sIdleFallBack = true;
    /* Trigger SF to redraw the current frame */
    ctx->proc->invalidate(ctx->proc);
}

void MDPComp::setMDPCompLayerFlags(hwc_context_t *ctx,
                                   hwc_display_contents_1_t* list) {
    LayerProp *layerProp = ctx->layerProp[mDpy];

    for(int index = 0; index < ctx->listStats[mDpy].numAppLayers; index++) {
        hwc_layer_1_t* layer = &(list->hwLayers[index]);
        if(!mCurrentFrame.isFBComposed[index]) {
            layerProp[index].mFlags |= HWC_MDPCOMP;
            layer->compositionType = HWC_OVERLAY;
            layer->hints |= HWC_HINT_CLEAR_FB;
        } else {
            /* Drop the layer when it's already present in FB OR when it lies
             * outside the frame's ROI */
            if(!mCurrentFrame.needsRedraw || mCurrentFrame.drop[index]) {
                layer->compositionType = HWC_OVERLAY;
            }
        }
    }
}

void MDPComp::setRedraw(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    mCurrentFrame.needsRedraw = false;
    if(!mCachedFrame.isSameFrame(mCurrentFrame, list) ||
            (list->flags & HWC_GEOMETRY_CHANGED) ||
            isSkipPresent(ctx, mDpy)) {
        mCurrentFrame.needsRedraw = true;
    }
}

MDPComp::FrameInfo::FrameInfo() {
    reset(0);
}

void MDPComp::FrameInfo::reset(const int& numLayers) {
    for(int i = 0 ; i < MAX_PIPES_PER_MIXER && numLayers; i++ ) {
        if(mdpToLayer[i].pipeInfo) {
            delete mdpToLayer[i].pipeInfo;
            mdpToLayer[i].pipeInfo = NULL;
            //We don't own the rotator
            mdpToLayer[i].rot = NULL;
        }
    }

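    //memset with -1 sets every byte to 0xFF, which reads back as -1 ("not
    //mapped to an MDP pipe") for the int entries of layerToMDP; the 0/1 fills
    //below are byte-wise, which matches the bool array isFBComposed.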
    memset(&mdpToLayer, 0, sizeof(mdpToLayer));
    memset(&layerToMDP, -1, sizeof(layerToMDP));
    memset(&isFBComposed, 1, sizeof(isFBComposed));

    layerCount = numLayers;
    fbCount = numLayers;
    mdpCount = 0;
    needsRedraw = true;
    fbZ = 0;
}

void MDPComp::FrameInfo::map() {
    // populate layer and MDP maps
    int mdpIdx = 0;
    for(int idx = 0; idx < layerCount; idx++) {
        if(!isFBComposed[idx]) {
            mdpToLayer[mdpIdx].listIndex = idx;
            layerToMDP[idx] = mdpIdx++;
        }
    }
}

MDPComp::LayerCache::LayerCache() {
    reset();
}

void MDPComp::LayerCache::reset() {
    memset(&hnd, 0, sizeof(hnd));
    memset(&isFBComposed, true, sizeof(isFBComposed));
    memset(&drop, false, sizeof(drop));
    layerCount = 0;
}

void MDPComp::LayerCache::cacheAll(hwc_display_contents_1_t* list) {
    const int numAppLayers = list->numHwLayers - 1;
    for(int i = 0; i < numAppLayers; i++) {
        hnd[i] = list->hwLayers[i].handle;
    }
}

void MDPComp::LayerCache::updateCounts(const FrameInfo& curFrame) {
    layerCount = curFrame.layerCount;
    memcpy(&isFBComposed, &curFrame.isFBComposed, sizeof(isFBComposed));
    memcpy(&drop, &curFrame.drop, sizeof(drop));
}

bool MDPComp::LayerCache::isSameFrame(const FrameInfo& curFrame,
                                      hwc_display_contents_1_t* list) {
    if(layerCount != curFrame.layerCount)
        return false;
    for(int i = 0; i < curFrame.layerCount; i++) {
        if((curFrame.isFBComposed[i] != isFBComposed[i]) ||
                (curFrame.drop[i] != drop[i])) {
            return false;
        }
        if(curFrame.isFBComposed[i] &&
           (hnd[i] != list->hwLayers[i].handle)){
            return false;
        }
    }
    return true;
}

bool MDPComp::isSupportedForMDPComp(hwc_context_t *ctx, hwc_layer_1_t* layer) {
    private_handle_t *hnd = (private_handle_t *)layer->handle;
    if((not isYuvBuffer(hnd) and has90Transform(layer)) or
        (not isValidDimension(ctx,layer))
        //More conditions here, SKIP, sRGB+Blend etc
        ) {
        return false;
    }
    return true;
}

bool MDPComp::isValidDimension(hwc_context_t *ctx, hwc_layer_1_t *layer) {
    const int dpy = HWC_DISPLAY_PRIMARY;
    private_handle_t *hnd = (private_handle_t *)layer->handle;

    if(!hnd) {
        if (layer->flags & HWC_COLOR_FILL) {
            // Color layer
            return true;
        }
        ALOGE("%s: layer handle is NULL", __FUNCTION__);
        return false;
    }

    //XXX: Investigate doing this with pixel phase on MDSS
    if(!isSecureBuffer(hnd) && isNonIntegralSourceCrop(layer->sourceCropf))
        return false;

    int hw_w = ctx->dpyAttr[mDpy].xres;
    int hw_h = ctx->dpyAttr[mDpy].yres;

    hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
    hwc_rect_t dst = layer->displayFrame;
    int crop_w = crop.right - crop.left;
    int crop_h = crop.bottom - crop.top;
    int dst_w = dst.right - dst.left;
    int dst_h = dst.bottom - dst.top;
    float w_scale = ((float)crop_w / (float)dst_w);
    float h_scale = ((float)crop_h / (float)dst_h);

    /* Workaround for MDP HW limitation in DSI command mode panels where
     * FPS will not go beyond 30 if buffers on RGB pipes are of width or height
     * less than 5 pixels.
     * There is also a HW limitation in MDP: the minimum block size is 2x2.
     * Fall back to GPU if height is less than 2.
     */
    if((crop_w < 5)||(crop_h < 5))
        return false;

    if((w_scale > 1.0f) || (h_scale > 1.0f)) {
        const uint32_t downscale =
            qdutils::MDPVersion::getInstance().getMaxMDPDownscale();
        const float w_dscale = w_scale;
        const float h_dscale = h_scale;

        if(ctx->mMDP.version >= qdutils::MDSS_V5) {
            /* Workaround for downscales larger than 4x.
             * Will be removed once decimator block is enabled for MDSS
             */
            if(!qdutils::MDPVersion::getInstance().supportsDecimation()) {
                if(crop_w > MAX_DISPLAY_DIM || w_dscale > downscale ||
                   h_dscale > downscale)
                    return false;
            } else {
                if(w_dscale > 64 || h_dscale > 64)
                    return false;
            }
        } else { //A-family
            if(w_dscale > downscale || h_dscale > downscale)
                return false;
        }
    }

    if((w_scale < 1.0f) || (h_scale < 1.0f)) {
        const uint32_t upscale =
            qdutils::MDPVersion::getInstance().getMaxMDPUpscale();
        const float w_uscale = 1.0f / w_scale;
        const float h_uscale = 1.0f / h_scale;

        if(w_uscale > upscale || h_uscale > upscale)
            return false;
    }

    return true;
}

ovutils::eDest MDPComp::getMdpPipe(hwc_context_t *ctx, ePipeType type,
        int mixer) {
    overlay::Overlay& ov = *ctx->mOverlay;
    ovutils::eDest mdp_pipe = ovutils::OV_INVALID;

    switch(type) {
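    //Cases below fall through intentionally: a DMA request that cannot be
    //satisfied tries an RGB pipe next, and DMA/ANY requests finally try a VG
    //pipe; only an explicit RGB request stops after the RGB attempt.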
    case MDPCOMP_OV_DMA:
        mdp_pipe = ov.nextPipe(ovutils::OV_MDP_PIPE_DMA, mDpy, mixer);
        if(mdp_pipe != ovutils::OV_INVALID) {
            return mdp_pipe;
        }
    case MDPCOMP_OV_ANY:
    case MDPCOMP_OV_RGB:
        mdp_pipe = ov.nextPipe(ovutils::OV_MDP_PIPE_RGB, mDpy, mixer);
        if(mdp_pipe != ovutils::OV_INVALID) {
            return mdp_pipe;
        }

        if(type == MDPCOMP_OV_RGB) {
            //Requested only for RGB pipe
            break;
        }
    case MDPCOMP_OV_VG:
        return ov.nextPipe(ovutils::OV_MDP_PIPE_VG, mDpy, mixer);
    default:
        ALOGE("%s: Invalid pipe type",__FUNCTION__);
        return ovutils::OV_INVALID;
    };
    return ovutils::OV_INVALID;
}

bool MDPComp::isFrameDoable(hwc_context_t *ctx) {
    bool ret = true;
    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    if(!isEnabled()) {
        ALOGD_IF(isDebug(),"%s: MDP Comp. not enabled.", __FUNCTION__);
        ret = false;
    } else if(qdutils::MDPVersion::getInstance().is8x26() &&
            ctx->mVideoTransFlag &&
            isSecondaryConnected(ctx)) {
        //1 Padding round to shift pipes across mixers
        ALOGD_IF(isDebug(),"%s: MDP Comp. video transition padding round",
                __FUNCTION__);
        ret = false;
    } else if(isSecondaryConfiguring(ctx)) {
        ALOGD_IF( isDebug(),"%s: External Display connection is pending",
                __FUNCTION__);
        ret = false;
    } else if(ctx->isPaddingRound) {
        ctx->isPaddingRound = false;
        ALOGD_IF(isDebug(), "%s: padding round",__FUNCTION__);
        ret = false;
    }
    return ret;
}

/*
 * 1) Identify layers that are not visible in the updating ROI and drop them
 * from composition.
 * 2) If a scaling layer needs cropping against the generated ROI, reset the
 * ROI to full resolution.
 */
bool MDPComp::validateAndApplyROI(hwc_context_t *ctx,
        hwc_display_contents_1_t* list, hwc_rect_t roi) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    if(!isValidRect(roi))
        return false;

    hwc_rect_t visibleRect = roi;

    for(int i = numAppLayers - 1; i >= 0; i--){

        if(!isValidRect(visibleRect)) {
            mCurrentFrame.drop[i] = true;
            mCurrentFrame.dropCount++;
            continue;
        }

        const hwc_layer_1_t* layer = &list->hwLayers[i];

        hwc_rect_t dstRect = layer->displayFrame;
        hwc_rect_t srcRect = integerizeSourceCrop(layer->sourceCropf);
        int transform = layer->transform;

        hwc_rect_t res = getIntersection(visibleRect, dstRect);

        int res_w = res.right - res.left;
        int res_h = res.bottom - res.top;
        int dst_w = dstRect.right - dstRect.left;
        int dst_h = dstRect.bottom - dstRect.top;

        if(!isValidRect(res)) {
            mCurrentFrame.drop[i] = true;
            mCurrentFrame.dropCount++;
        } else {
            /* Reset frame ROI when any layer which needs scaling also needs ROI
             * cropping */
            if((res_w != dst_w || res_h != dst_h) && needsScaling(layer)) {
                ALOGI("%s: Resetting ROI due to scaling", __FUNCTION__);
                memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
                mCurrentFrame.dropCount = 0;
                return false;
            }

            /* deduct any opaque region from visibleRect */
            if (layer->blending == HWC_BLENDING_NONE)
                visibleRect = deductRect(visibleRect, res);
        }
    }
    return true;
}

void MDPComp::generateROI(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    if(!sEnablePartialFrameUpdate) {
        return;
    }

    if(mDpy || isDisplaySplit(ctx, mDpy)){
        ALOGE_IF(isDebug(), "%s: ROI not supported for "
                "(1) external / virtual displays (2) dual DSI displays",
                __FUNCTION__);
        return;
    }

    if(isSkipPresent(ctx, mDpy))
        return;

    if(list->flags & HWC_GEOMETRY_CHANGED)
        return;

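    /* The ROI is accumulated as the union of the displayFrames of layers whose
     * buffer handle changed since the cached frame; YUV layers are always
     * treated as updating. */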
    struct hwc_rect roi = (struct hwc_rect){0, 0, 0, 0};
    for(int index = 0; index < numAppLayers; index++ ) {
        if ((mCachedFrame.hnd[index] != list->hwLayers[index].handle) ||
            isYuvBuffer((private_handle_t *)list->hwLayers[index].handle)) {
            hwc_rect_t dstRect = list->hwLayers[index].displayFrame;
            hwc_rect_t srcRect = integerizeSourceCrop(
                    list->hwLayers[index].sourceCropf);
            int transform = list->hwLayers[index].transform;

            /* Accumulate the union of updated regions */
            roi = getUnion(roi, dstRect);
        }
    }

    if(!validateAndApplyROI(ctx, list, roi)){
        roi = (struct hwc_rect) {0, 0,
                (int)ctx->dpyAttr[mDpy].xres, (int)ctx->dpyAttr[mDpy].yres};
    }

    ctx->listStats[mDpy].roi.x = roi.left;
    ctx->listStats[mDpy].roi.y = roi.top;
    ctx->listStats[mDpy].roi.w = roi.right - roi.left;
    ctx->listStats[mDpy].roi.h = roi.bottom - roi.top;

    ALOGD_IF(isDebug(),"%s: generated ROI: [%d, %d, %d, %d]", __FUNCTION__,
            roi.left, roi.top, roi.right, roi.bottom);
}

/* Checks for conditions where all the layers marked for MDP comp cannot be
 * bypassed. In such conditions we try to bypass at least the YUV layers */
bool MDPComp::tryFullFrame(hwc_context_t *ctx,
        hwc_display_contents_1_t* list){

    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    int priDispW = ctx->dpyAttr[HWC_DISPLAY_PRIMARY].xres;

    if(sIdleFallBack && !ctx->listStats[mDpy].secureUI) {
        ALOGD_IF(isDebug(), "%s: Idle fallback dpy %d",__FUNCTION__, mDpy);
        return false;
    }

    if(isSkipPresent(ctx, mDpy)) {
        ALOGD_IF(isDebug(),"%s: SKIP present: %d",
                __FUNCTION__,
                isSkipPresent(ctx, mDpy));
        return false;
    }

    if(mDpy > HWC_DISPLAY_PRIMARY && (priDispW > MAX_DISPLAY_DIM) &&
        (ctx->dpyAttr[mDpy].xres < MAX_DISPLAY_DIM)) {
        // Disable MDP comp on the secondary display when the primary is a
        // high-res panel and the secondary is a normal 1080p panel: MDP comp
        // on the secondary would use decimation for the downscale, causing a
        // quality mismatch whenever there is a fallback to GPU composition.
        ALOGD_IF(isDebug(), "%s: Disable MDP Composition for Secondary Disp",
                __FUNCTION__);
        return false;
    }

    // check for action safe flag and downscale mode which requires scaling.
    if(ctx->dpyAttr[mDpy].mActionSafePresent
            || ctx->dpyAttr[mDpy].mDownScaleMode) {
        ALOGD_IF(isDebug(), "%s: Scaling needed for this frame",__FUNCTION__);
        return false;
    }

    for(int i = 0; i < numAppLayers; ++i) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        private_handle_t *hnd = (private_handle_t *)layer->handle;

        if(isYuvBuffer(hnd) && has90Transform(layer)) {
            if(!canUseRotator(ctx, mDpy)) {
                ALOGD_IF(isDebug(), "%s: Can't use rotator for dpy %d",
                        __FUNCTION__, mDpy);
                return false;
            }
        }

        //For 8x26 with panel width>1k, if RGB layer needs HFLIP fail mdp comp
        // may not need it if Gfx pre-rotation can handle all flips & rotations
        if(qdutils::MDPVersion::getInstance().is8x26() &&
                (ctx->dpyAttr[mDpy].xres > 1024) &&
                (layer->transform & HWC_TRANSFORM_FLIP_H) &&
                (!isYuvBuffer(hnd)))
            return false;
    }

    if(ctx->mAD->isDoable()) {
        return false;
    }

    //If all above hard conditions are met we can do full or partial MDP comp.
    bool ret = false;
    if(fullMDPComp(ctx, list)) {
        ret = true;
    } else if(partialMDPComp(ctx, list)) {
        ret = true;
    }

    return ret;
}

bool MDPComp::fullMDPComp(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
    //Will benefit presentation / secondary-only layer.
    if((mDpy > HWC_DISPLAY_PRIMARY) &&
            (list->numHwLayers - 1) > MAX_SEC_LAYERS) {
        ALOGD_IF(isDebug(), "%s: Exceeds max secondary pipes",__FUNCTION__);
        return false;
    }

    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    for(int i = 0; i < numAppLayers; i++) {
        hwc_layer_1_t* layer = &list->hwLayers[i];
        if(not isSupportedForMDPComp(ctx, layer)) {
            ALOGD_IF(isDebug(), "%s: Unsupported layer in list",__FUNCTION__);
            return false;
        }

        //For 8x26, if there is only one layer which needs scaling for the
        //secondary while the primary needs no scaling, the DMA pipe is
        //occupied by the primary. If we then need to fall back to GLES
        //composition, the virtual display lacks a DMA pipe and an error is
        //reported.
        if(qdutils::MDPVersion::getInstance().is8x26() &&
                mDpy >= HWC_DISPLAY_EXTERNAL &&
                qhwc::needsScaling(layer))
            return false;
    }

    mCurrentFrame.fbCount = 0;
    mCurrentFrame.fbZ = -1;
    memcpy(&mCurrentFrame.isFBComposed, &mCurrentFrame.drop,
            sizeof(mCurrentFrame.isFBComposed));
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount -
            mCurrentFrame.dropCount;

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

bool MDPComp::partialMDPComp(hwc_context_t *ctx, hwc_display_contents_1_t* list)
{
    if(!sEnableMixedMode) {
        //Mixed mode is disabled. No need to even try caching.
        return false;
    }

    bool ret = false;
    if(list->flags & HWC_GEOMETRY_CHANGED) { //Try load based first
        ret = loadBasedCompPreferGPU(ctx, list) or
                loadBasedCompPreferMDP(ctx, list) or
                cacheBasedComp(ctx, list);
    } else {
        ret = cacheBasedComp(ctx, list) or
                loadBasedCompPreferGPU(ctx, list) or
                loadBasedCompPreferMDP(ctx, list);
    }

    return ret;
}

bool MDPComp::cacheBasedComp(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numAppLayers);
    updateLayerCache(ctx, list);

    //If an MDP-marked layer is unsupported we cannot do partial MDP comp
    for(int i = 0; i < numAppLayers; i++) {
        if(!mCurrentFrame.isFBComposed[i]) {
            hwc_layer_1_t* layer = &list->hwLayers[i];
            if(not isSupportedForMDPComp(ctx, layer)) {
                ALOGD_IF(isDebug(), "%s: Unsupported layer in list",
                        __FUNCTION__);
                reset(ctx);
                return false;
            }
        }
    }

    updateYUV(ctx, list, false /*secure only*/);
    bool ret = markLayersForCaching(ctx, list); //sets up fbZ also
    if(!ret) {
        ALOGD_IF(isDebug(),"%s: batching failed, dpy %d",__FUNCTION__, mDpy);
        reset(ctx);
        return false;
    }

    int mdpCount = mCurrentFrame.mdpCount;

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    //Will benefit cases where a video has non-updating background.
    if((mDpy > HWC_DISPLAY_PRIMARY) and
            (mdpCount > MAX_SEC_LAYERS)) {
        ALOGD_IF(isDebug(), "%s: Exceeds max secondary pipes",__FUNCTION__);
        reset(ctx);
        return false;
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

bool MDPComp::loadBasedCompPreferGPU(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    if(not isLoadBasedCompDoable(ctx, list)) {
        return false;
    }

    int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numAppLayers);

    int stagesForMDP = min(sMaxPipesPerMixer, ctx->mOverlay->availablePipes(
            mDpy, Overlay::MIXER_DEFAULT));
    //If MDP has X possible stages, it can take X layers.
    const int batchSize = (numAppLayers - mCurrentFrame.dropCount) -
            (stagesForMDP - 1); //1 for FB

    if(batchSize <= 0) {
        ALOGD_IF(isDebug(), "%s: Not attempting", __FUNCTION__);
        return false;
    }

    int minBatchStart = -1;
    int minBatchEnd = -1;
    size_t minBatchPixelCount = SIZE_MAX;

    /* Iterate through the layer list to find a contiguous batch of batchSize
     * non-dropped layers with the lowest pixel count */
    for(int i = 0; i <= (numAppLayers - batchSize); i++) {
        if(mCurrentFrame.drop[i])
            continue;

        int batchCount = batchSize;
        uint32_t batchPixelCount = 0;
        int j = i;
        for(; j < numAppLayers && batchCount; j++){
            if(!mCurrentFrame.drop[j]) {
                hwc_layer_1_t* layer = &list->hwLayers[j];
                hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
                hwc_rect_t dst = layer->displayFrame;

                /* If we have a valid ROI, count pixels only for the MDP fetched
                 * region of the buffer */
                if((ctx->listStats[mDpy].roi.w != ctx->dpyAttr[mDpy].xres) ||
                        (ctx->listStats[mDpy].roi.h != ctx->dpyAttr[mDpy].yres)) {
                    hwc_rect_t roi;
                    roi.left = ctx->listStats[mDpy].roi.x;
                    roi.top = ctx->listStats[mDpy].roi.y;
                    roi.right = roi.left + ctx->listStats[mDpy].roi.w;
                    roi.bottom = roi.top + ctx->listStats[mDpy].roi.h;

                    /* valid ROI means no scaling layer is composed. So check
                     * only intersection to find actual fetched pixels */
                    crop = getIntersection(roi, dst);
                }

                batchPixelCount += (crop.right - crop.left) *
                        (crop.bottom - crop.top);
                batchCount--;
            }
        }

        /* we don't want to program any batch smaller than batchSize */
        if(!batchCount && (batchPixelCount < minBatchPixelCount)) {
            minBatchPixelCount = batchPixelCount;
            minBatchStart = i;
            minBatchEnd = j-1;
        }
    }

    if(minBatchStart < 0) {
        ALOGD_IF(isDebug(), "%s: No batch found batchSize %d numAppLayers %d",
                __FUNCTION__, batchSize, numAppLayers);
        return false;
    }

    /* non-dropped layers falling outside the selected batch will be marked for
     * MDP */
    for(int i = 0; i < numAppLayers; i++) {
        if((i < minBatchStart || i > minBatchEnd) && !mCurrentFrame.drop[i] ) {
            hwc_layer_1_t* layer = &list->hwLayers[i];
            if(not isSupportedForMDPComp(ctx, layer)) {
                ALOGD_IF(isDebug(), "%s: MDP unsupported layer found at %d",
                        __FUNCTION__, i);
                reset(ctx);
                return false;
            }
            mCurrentFrame.isFBComposed[i] = false;
        }
    }

    mCurrentFrame.fbZ = minBatchStart;
    mCurrentFrame.fbCount = batchSize;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount -
            mCurrentFrame.dropCount;

    ALOGD_IF(isDebug(), "%s: fbZ %d batchSize %d fbStart: %d fbEnd: %d",
            __FUNCTION__, mCurrentFrame.fbZ, batchSize, minBatchStart,
            minBatchEnd);

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

bool MDPComp::loadBasedCompPreferMDP(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    if(not isLoadBasedCompDoable(ctx, list)) {
        return false;
    }

    const int numAppLayers = ctx->listStats[mDpy].numAppLayers;
    mCurrentFrame.reset(numAppLayers);

    //Full screen is from ib perspective, not actual full screen
    const int bpp = 4;
    double panelRefRate =
            1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;

    double bwLeft = sMaxBw - sBwClaimed;

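    //Leftover bandwidth (GBps, hence the 1e9 factor below) divided by the cost
    //of one full-screen RGBA layer per refresh (xres * yres * bpp bytes at the
    //panel refresh rate) estimates how many more full-screen layers MDP can
    //still fetch.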
    const int fullScreenLayers = bwLeft * 1000000000 / (ctx->dpyAttr[mDpy].xres
            * ctx->dpyAttr[mDpy].yres * bpp * panelRefRate);

    const int fbBatchSize = (numAppLayers - mCurrentFrame.dropCount)
            - (fullScreenLayers - 1);

    //If batch size is not at least 2, we aren't really preferring MDP, since
    //only 1 layer going to GPU could actually translate into an entire FB
    //needed to be fetched by MDP, thus needing more b/w rather than less.
    if(fbBatchSize < 2 || fbBatchSize > numAppLayers) {
        ALOGD_IF(isDebug(), "%s: Not attempting", __FUNCTION__);
        return false;
    }

    //Find top fbBatchSize non-dropped layers to get your batch
    int fbStart = -1, fbEnd = -1, batchCount = fbBatchSize;
    for(int i = numAppLayers - 1; i >= 0; i--) {
        if(mCurrentFrame.drop[i])
            continue;

        if(fbEnd < 0)
            fbEnd = i;

        if(!(--batchCount)) {
            fbStart = i;
            break;
        }
    }

    //Bottom layers constitute MDP batch
    for(int i = 0; i < fbStart; i++) {
        if((i < fbStart || i > fbEnd) && !mCurrentFrame.drop[i] ) {
            hwc_layer_1_t* layer = &list->hwLayers[i];
            if(not isSupportedForMDPComp(ctx, layer)) {
                ALOGD_IF(isDebug(), "%s: MDP unsupported layer found at %d",
                        __FUNCTION__, i);
                reset(ctx);
                return false;
            }
            mCurrentFrame.isFBComposed[i] = false;
        }
    }

    mCurrentFrame.fbZ = fbStart;
    mCurrentFrame.fbCount = fbBatchSize;
    mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount
            - mCurrentFrame.dropCount;

    ALOGD_IF(isDebug(), "%s: FB Z %d, app layers %d, non-dropped layers: %d, "
            "MDP Batch Size %d",__FUNCTION__, mCurrentFrame.fbZ, numAppLayers,
            numAppLayers - mCurrentFrame.dropCount, mCurrentFrame.mdpCount);

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

bool MDPComp::isLoadBasedCompDoable(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    if(mDpy or isSecurePresent(ctx, mDpy) or
            isYuvPresent(ctx, mDpy)) {
        return false;
    }
    return true;
}

bool MDPComp::tryVideoOnly(hwc_context_t *ctx,
        hwc_display_contents_1_t* list) {
    const bool secureOnly = true;
    return videoOnlyComp(ctx, list, not secureOnly) or
            videoOnlyComp(ctx, list, secureOnly);
}

bool MDPComp::videoOnlyComp(hwc_context_t *ctx,
        hwc_display_contents_1_t* list, bool secureOnly) {
    int numAppLayers = ctx->listStats[mDpy].numAppLayers;

    mCurrentFrame.reset(numAppLayers);
    updateYUV(ctx, list, secureOnly);
    int mdpCount = mCurrentFrame.mdpCount;

    if(!isYuvPresent(ctx, mDpy) or (mdpCount == 0)) {
        reset(ctx);
        return false;
    }

    /* Bail out if we are processing only secured video layers
     * and we don't have any */
    if(!isSecurePresent(ctx, mDpy) && secureOnly){
        reset(ctx);
        return false;
    }

    if(mCurrentFrame.fbCount)
        mCurrentFrame.fbZ = mCurrentFrame.mdpCount;

    if(sEnable4k2kYUVSplit){
        adjustForSourceSplit(ctx, list);
    }

    if(!postHeuristicsHandling(ctx, list)) {
        ALOGD_IF(isDebug(), "post heuristic handling failed");
        reset(ctx);
        return false;
    }

    return true;
}

/* Checks for conditions where YUV layers cannot be bypassed */
bool MDPComp::isYUVDoable(hwc_context_t* ctx, hwc_layer_1_t* layer) {
    if(isSkipLayer(layer)) {
        ALOGD_IF(isDebug(), "%s: Video marked SKIP dpy %d", __FUNCTION__, mDpy);
        return false;
    }

    if(layer->transform & HWC_TRANSFORM_ROT_90 && !canUseRotator(ctx,mDpy)) {
        ALOGD_IF(isDebug(), "%s: no free DMA pipe",__FUNCTION__);
        return false;
    }

    if(isSecuring(ctx, layer)) {
        ALOGD_IF(isDebug(), "%s: MDP securing is active", __FUNCTION__);
        return false;
    }

    if(!isValidDimension(ctx, layer)) {
        ALOGD_IF(isDebug(), "%s: Buffer is of invalid width",
            __FUNCTION__);
        return false;
    }

    if(layer->planeAlpha < 0xFF) {
        ALOGD_IF(isDebug(), "%s: Cannot handle YUV layer with plane alpha\
                in video only mode",
                __FUNCTION__);
        return false;
    }

    return true;
}

/* Starting at fromIndex, checks each FB-composed layer up to toIndex for
 * overlap with any updating layer above it in z-order within the batch.
 * Returns false if any such intersection is found. */
bool MDPComp::canPushBatchToTop(const hwc_display_contents_1_t* list,
        int fromIndex, int toIndex) {
    for(int i = fromIndex; i < toIndex; i++) {
        if(mCurrentFrame.isFBComposed[i] && !mCurrentFrame.drop[i]) {
            if(intersectingUpdatingLayers(list, i+1, toIndex, i)) {
                return false;
            }
        }
    }
    return true;
}

/* Checks if the layer at targetLayerIndex intersects any of the updating
 * layers between fromIndex and toIndex. Returns true if an intersection
 * is found */
bool MDPComp::intersectingUpdatingLayers(const hwc_display_contents_1_t* list,
        int fromIndex, int toIndex, int targetLayerIndex) {
    for(int i = fromIndex; i <= toIndex; i++) {
        if(!mCurrentFrame.isFBComposed[i]) {
            if(areLayersIntersecting(&list->hwLayers[i],
                    &list->hwLayers[targetLayerIndex])) {
                return true;
            }
        }
    }
    return false;
}

int MDPComp::getBatch(hwc_display_contents_1_t* list,
        int& maxBatchStart, int& maxBatchEnd,
        int& maxBatchCount) {
    int i = 0;
    int fbZOrder = -1;
    int droppedLayerCt = 0;
    while (i < mCurrentFrame.layerCount) {
        int batchCount = 0;
        int batchStart = i;
        int batchEnd = i;
        /* Adjust batch Z order with the dropped layers so far */
        int fbZ = batchStart - droppedLayerCt;
        int firstZReverseIndex = -1;
        int updatingLayersAbove = 0;//Updating layer count in middle of batch
        while(i < mCurrentFrame.layerCount) {
            if(!mCurrentFrame.isFBComposed[i]) {
                if(!batchCount) {
                    i++;
                    break;
                }
                updatingLayersAbove++;
                i++;
                continue;
            } else {
                if(mCurrentFrame.drop[i]) {
                    i++;
                    droppedLayerCt++;
                    continue;
                } else if(updatingLayersAbove <= 0) {
                    batchCount++;
                    batchEnd = i;
                    i++;
                    continue;
                } else { //Layer is FBComposed, not a drop & updatingLayer > 0

                    // We already have a valid updating layer. If layer i does
                    // not overlap any updating layer between batch-start and i,
                    // then we can add layer i to the batch.
                    if(!intersectingUpdatingLayers(list, batchStart, i-1, i)) {
                        batchCount++;
                        batchEnd = i;
                        i++;
                        continue;
                    } else if(canPushBatchToTop(list, batchStart, i)) {
                        //If none of the non-updating layers within this batch
                        //intersect the updating layers above in z-order, we can
                        //safely move the batch to a higher z-order. Increment
                        //fbZ as it moves up.
                        if( firstZReverseIndex < 0) {
                            firstZReverseIndex = i;
                        }
                        batchCount++;
                        batchEnd = i;
                        fbZ += updatingLayersAbove;
                        i++;
                        updatingLayersAbove = 0;
                        continue;
                    } else {
                        //Both checks failed. Start the loop again from here.
                        if(firstZReverseIndex >= 0) {
                            i = firstZReverseIndex;
                        }
                        break;
                    }
                }
            }
        }
        if(batchCount > maxBatchCount) {
            maxBatchCount = batchCount;
            maxBatchStart = batchStart;
            maxBatchEnd = batchEnd;
            fbZOrder = fbZ;
        }
    }
    return fbZOrder;
}

1115bool MDPComp::markLayersForCaching(hwc_context_t* ctx,
1116 hwc_display_contents_1_t* list) {
1117 /* Idea is to keep as many non-updating(cached) layers in FB and
1118 * send rest of them through MDP. This is done in 2 steps.
1119 * 1. Find the maximum contiguous batch of non-updating layers.
1120 * 2. See if we can improve this batch size for caching by adding
1121 * opaque layers around the batch, if they don't have
1122 * any overlapping with the updating layers in between.
1123 * NEVER mark an updating layer for caching.
1124 * But cached ones can be marked for MDP */
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001125
1126 int maxBatchStart = -1;
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001127 int maxBatchEnd = -1;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001128 int maxBatchCount = 0;
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301129 int fbZ = -1;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001130
1131 /* All or Nothing is cached. No batching needed */
Saurabh Shahaa236822013-04-24 18:07:26 -07001132 if(!mCurrentFrame.fbCount) {
1133 mCurrentFrame.fbZ = -1;
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001134 return true;
Saurabh Shahaa236822013-04-24 18:07:26 -07001135 }
1136 if(!mCurrentFrame.mdpCount) {
1137 mCurrentFrame.fbZ = 0;
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001138 return true;
Saurabh Shahaa236822013-04-24 18:07:26 -07001139 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001140
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301141 fbZ = getBatch(list, maxBatchStart, maxBatchEnd, maxBatchCount);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001142
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301143 /* reset rest of the layers lying inside ROI for MDP comp */
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001144 for(int i = 0; i < mCurrentFrame.layerCount; i++) {
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001145 hwc_layer_1_t* layer = &list->hwLayers[i];
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001146 if((i < maxBatchStart || i > maxBatchEnd) &&
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301147 mCurrentFrame.isFBComposed[i]){
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001148 if(!mCurrentFrame.drop[i]){
1149 //If an unsupported layer is being attempted to
1150 //be pulled out we should fail
1151 if(not isSupportedForMDPComp(ctx, layer)) {
1152 return false;
1153 }
1154 mCurrentFrame.isFBComposed[i] = false;
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001155 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001156 }
1157 }
1158
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301159 // update the frame data
1160 mCurrentFrame.fbZ = fbZ;
1161 mCurrentFrame.fbCount = maxBatchCount;
Saurabh Shahaa236822013-04-24 18:07:26 -07001162 mCurrentFrame.mdpCount = mCurrentFrame.layerCount -
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001163 mCurrentFrame.fbCount - mCurrentFrame.dropCount;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001164
1165 ALOGD_IF(isDebug(),"%s: cached count: %d",__FUNCTION__,
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301166 mCurrentFrame.fbCount);
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001167
1168 return true;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001169}
Saurabh Shah85234ec2013-04-12 17:09:00 -07001170
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001171void MDPComp::updateLayerCache(hwc_context_t* ctx,
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001172 hwc_display_contents_1_t* list) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001173 int numAppLayers = ctx->listStats[mDpy].numAppLayers;
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001174 int fbCount = 0;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001175
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001176 for(int i = 0; i < numAppLayers; i++) {
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001177 hwc_layer_1_t* layer = &list->hwLayers[i];
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001178 if (mCachedFrame.hnd[i] == list->hwLayers[i].handle) {
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001179 if(!mCurrentFrame.drop[i])
1180 fbCount++;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001181 mCurrentFrame.isFBComposed[i] = true;
1182 } else {
Saurabh Shahaa236822013-04-24 18:07:26 -07001183 mCurrentFrame.isFBComposed[i] = false;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001184 }
1185 }
Saurabh Shahaa236822013-04-24 18:07:26 -07001186
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001187 mCurrentFrame.fbCount = fbCount;
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001188 mCurrentFrame.mdpCount = mCurrentFrame.layerCount - mCurrentFrame.fbCount
1189 - mCurrentFrame.dropCount;
Saurabh Shahe9bc60f2013-08-29 12:58:06 -07001190
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001191 ALOGD_IF(isDebug(),"%s: MDP count: %d FB count %d drop count: %d"
1192 ,__FUNCTION__, mCurrentFrame.mdpCount, mCurrentFrame.fbCount,
1193 mCurrentFrame.dropCount);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001194}
1195
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001196void MDPComp::updateYUV(hwc_context_t* ctx, hwc_display_contents_1_t* list,
1197 bool secureOnly) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001198 int nYuvCount = ctx->listStats[mDpy].yuvCount;
1199 for(int index = 0;index < nYuvCount; index++){
1200 int nYuvIndex = ctx->listStats[mDpy].yuvIndices[index];
1201 hwc_layer_1_t* layer = &list->hwLayers[nYuvIndex];
1202
1203 if(!isYUVDoable(ctx, layer)) {
1204 if(!mCurrentFrame.isFBComposed[nYuvIndex]) {
1205 mCurrentFrame.isFBComposed[nYuvIndex] = true;
1206 mCurrentFrame.fbCount++;
1207 }
1208 } else {
1209 if(mCurrentFrame.isFBComposed[nYuvIndex]) {
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001210 private_handle_t *hnd = (private_handle_t *)layer->handle;
1211 if(!secureOnly || isSecureBuffer(hnd)) {
1212 mCurrentFrame.isFBComposed[nYuvIndex] = false;
1213 mCurrentFrame.fbCount--;
1214 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001215 }
1216 }
1217 }
Saurabh Shahaa236822013-04-24 18:07:26 -07001218
1219 mCurrentFrame.mdpCount = mCurrentFrame.layerCount -
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001220 mCurrentFrame.fbCount - mCurrentFrame.dropCount;
1221 ALOGD_IF(isDebug(),"%s: fb count: %d",__FUNCTION__,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001222 mCurrentFrame.fbCount);
1223}
1224
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001225bool MDPComp::postHeuristicsHandling(hwc_context_t *ctx,
1226 hwc_display_contents_1_t* list) {
1227
1228 //Capability checks
1229 if(!resourceCheck(ctx, list)) {
1230 ALOGD_IF(isDebug(), "%s: resource check failed", __FUNCTION__);
1231 return false;
1232 }
1233
1234 //Limitations checks
1235 if(!hwLimitationsCheck(ctx, list)) {
1236 ALOGD_IF(isDebug(), "%s: HW limitations",__FUNCTION__);
1237 return false;
1238 }
1239
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001240 //Configure framebuffer first if applicable
1241 if(mCurrentFrame.fbZ >= 0) {
1242 if(!ctx->mFBUpdate[mDpy]->prepare(ctx, list, mCurrentFrame.fbZ)) {
1243 ALOGD_IF(isDebug(), "%s configure framebuffer failed",
1244 __FUNCTION__);
1245 return false;
1246 }
1247 }
1248
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001249 mCurrentFrame.map();
1250
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001251 if(!allocLayerPipes(ctx, list)) {
1252 ALOGD_IF(isDebug(), "%s: Unable to allocate MDP pipes", __FUNCTION__);
Saurabh Shahaa236822013-04-24 18:07:26 -07001253 return false;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001254 }
1255
1256 for (int index = 0, mdpNextZOrder = 0; index < mCurrentFrame.layerCount;
Saurabh Shahaa236822013-04-24 18:07:26 -07001257 index++) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001258 if(!mCurrentFrame.isFBComposed[index]) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001259 int mdpIndex = mCurrentFrame.layerToMDP[index];
1260 hwc_layer_1_t* layer = &list->hwLayers[index];
1261
Prabhanjan Kandula9bd5f642013-09-25 17:00:36 +05301262 //Leave fbZ for framebuffer. CACHE/GLES layers go here.
1263 if(mdpNextZOrder == mCurrentFrame.fbZ) {
1264 mdpNextZOrder++;
1265 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001266 MdpPipeInfo* cur_pipe = mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
1267 cur_pipe->zOrder = mdpNextZOrder++;
1268
radhakrishnac9a67412013-09-25 17:40:42 +05301269 private_handle_t *hnd = (private_handle_t *)layer->handle;
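            //A split 4kx2k YUV layer uses two pipes, so it consumes two
            //z-orders (the extra increment happens below on success).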
1270 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1271 if(configure4k2kYuv(ctx, layer,
1272 mCurrentFrame.mdpToLayer[mdpIndex])
1273 != 0 ){
1274 ALOGD_IF(isDebug(), "%s: Failed to configure split pipes \
1275 for layer %d",__FUNCTION__, index);
1276 return false;
1277 }
1278 else{
1279 mdpNextZOrder++;
1280 }
1281 continue;
1282 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001283 if(configure(ctx, layer, mCurrentFrame.mdpToLayer[mdpIndex]) != 0 ){
1284 ALOGD_IF(isDebug(), "%s: Failed to configure overlay for \
radhakrishnac9a67412013-09-25 17:40:42 +05301285 layer %d",__FUNCTION__, index);
Saurabh Shahaa236822013-04-24 18:07:26 -07001286 return false;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001287 }
Saurabh Shahaa236822013-04-24 18:07:26 -07001288 }
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001289 }
1290
Saurabh Shaha36be922013-12-16 18:18:39 -08001291 if(!ctx->mOverlay->validateAndSet(mDpy, ctx->dpyAttr[mDpy].fd)) {
1292 ALOGD_IF(isDebug(), "%s: Failed to validate and set overlay for dpy %d"
1293 ,__FUNCTION__, mDpy);
1294 return false;
1295 }
1296
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001297 setRedraw(ctx, list);
Saurabh Shahaa236822013-04-24 18:07:26 -07001298 return true;
1299}
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001300
Saurabh Shah173f4242013-11-20 09:50:12 -08001301bool MDPComp::resourceCheck(hwc_context_t *ctx,
1302 hwc_display_contents_1_t *list) {
1303 const bool fbUsed = mCurrentFrame.fbCount;
1304 if(mCurrentFrame.mdpCount > sMaxPipesPerMixer - fbUsed) {
1305 ALOGD_IF(isDebug(), "%s: Exceeds MAX_PIPES_PER_MIXER",__FUNCTION__);
1306 return false;
1307 }
1308
1309 if(!arePipesAvailable(ctx, list)) {
1310 return false;
1311 }
1312
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001313 double size = calcMDPBytesRead(ctx, list);
Saurabh Shah173f4242013-11-20 09:50:12 -08001314 if(!bandwidthCheck(ctx, size)) {
1315 ALOGD_IF(isDebug(), "%s: Exceeds bandwidth",__FUNCTION__);
1316 return false;
1317 }
1318
1319 return true;
1320}
1321
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001322double MDPComp::calcMDPBytesRead(hwc_context_t *ctx,
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001323 hwc_display_contents_1_t* list) {
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001324 double size = 0;
1325 const double GIG = 1000000000.0;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001326
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001327 //Skip for targets where no device tree value for bw is supplied
1328 if(sMaxBw <= 0.0) {
1329 return 0.0;
1330 }
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001331
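    //For each MDP-composed layer, approximate the bytes read per refresh:
    //  bpp * src_w * src_h * (panel_yres / dst_h), expressed in GB. The
    //yres/dst_h factor accounts for overfetch when the layer is scaled down
    //vertically. For example, a full-screen unscaled 1080p RGBA layer reads
    //roughly 1920*1080*4 bytes = ~0.0083 GB per frame (~0.5 GBps at 60 Hz).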
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001332 for (uint32_t i = 0; i < list->numHwLayers - 1; i++) {
1333 if(!mCurrentFrame.isFBComposed[i]) {
1334 hwc_layer_1_t* layer = &list->hwLayers[i];
1335 private_handle_t *hnd = (private_handle_t *)layer->handle;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001336 if (hnd) {
Saurabh Shah62e1d732013-09-17 10:44:05 -07001337 hwc_rect_t crop = integerizeSourceCrop(layer->sourceCropf);
Saurabh Shah90789162013-09-16 10:29:20 -07001338 hwc_rect_t dst = layer->displayFrame;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001339 float bpp = ((float)hnd->size) / (hnd->width * hnd->height);
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001340 size += (bpp * (crop.right - crop.left) *
1341 (crop.bottom - crop.top) *
1342 ctx->dpyAttr[mDpy].yres / (dst.bottom - dst.top)) /
1343 GIG;
Terence Hampson9cd5fa92013-09-10 17:06:37 -04001344 }
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001345 }
1346 }
1347
1348 if(mCurrentFrame.fbCount) {
1349 hwc_layer_1_t* layer = &list->hwLayers[list->numHwLayers - 1];
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001350 int tempw, temph;
1351 size += (getBufferSizeAndDimensions(
1352 layer->displayFrame.right - layer->displayFrame.left,
1353 layer->displayFrame.bottom - layer->displayFrame.top,
1354 HAL_PIXEL_FORMAT_RGBA_8888,
1355 tempw, temph)) / GIG;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001356 }
1357
1358 return size;
1359}
1360
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001361bool MDPComp::bandwidthCheck(hwc_context_t *ctx, const double& size) {
1362 //Skip for targets where no device tree value for bw is supplied
1363 if(sMaxBw <= 0.0) {
1364 return true;
1365 }
1366
1367 double panelRefRate =
1368 1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;
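    //size is GB read per frame; multiplied by the refresh rate (Hz, derived
    //from the ns vsync period) it gives GBps, which must fit within the
    //bandwidth not already claimed by earlier prepare() calls (sBwClaimed).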
1369 if((size * panelRefRate) > (sMaxBw - sBwClaimed)) {
1370 return false;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001371 }
1372 return true;
1373}
1374
Prabhanjan Kandula21918db2013-11-26 15:51:58 +05301375bool MDPComp::hwLimitationsCheck(hwc_context_t* ctx,
1376 hwc_display_contents_1_t* list) {
1377
1378 //A-family hw limitation:
1379 //If a layer needs alpha scaling, MDP cannot support it.
1380 if(ctx->mMDP.version < qdutils::MDSS_V5) {
1381 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1382 if(!mCurrentFrame.isFBComposed[i] &&
1383 isAlphaScaled( &list->hwLayers[i])) {
1384 ALOGD_IF(isDebug(), "%s:frame needs alphaScaling",__FUNCTION__);
1385 return false;
1386 }
1387 }
1388 }
1389
1390 // On 8x26 & 8974 hw, we have a limitation of downscaling+blending.
1391 //If multiple layers require downscaling and they overlap,
1392 //fall back to GPU since MDSS cannot handle it.
1393 if(qdutils::MDPVersion::getInstance().is8x74v2() ||
1394 qdutils::MDPVersion::getInstance().is8x26()) {
1395 for(int i = 0; i < mCurrentFrame.layerCount-1; ++i) {
1396 hwc_layer_1_t* botLayer = &list->hwLayers[i];
1397 if(!mCurrentFrame.isFBComposed[i] &&
1398 isDownscaleRequired(botLayer)) {
1399 //if layer-i is marked for MDP and needs downscaling
1400 //check if any MDP layer on top of i & overlaps with layer-i
1401 for(int j = i+1; j < mCurrentFrame.layerCount; ++j) {
1402 hwc_layer_1_t* topLayer = &list->hwLayers[j];
1403 if(!mCurrentFrame.isFBComposed[j] &&
1404 isDownscaleRequired(topLayer)) {
1405 hwc_rect_t r = getIntersection(botLayer->displayFrame,
1406 topLayer->displayFrame);
1407 if(isValidRect(r))
1408 return false;
1409 }
1410 }
1411 }
1412 }
1413 }
1414 return true;
1415}
1416
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001417int MDPComp::prepare(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001418 int ret = 0;
Saurabh Shahaa236822013-04-24 18:07:26 -07001419 const int numLayers = ctx->listStats[mDpy].numAppLayers;
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001420 MDPVersion& mdpVersion = qdutils::MDPVersion::getInstance();
Ramkumar Radhakrishnanc5893f12013-06-06 19:43:53 -07001421
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001422 //If the number of app layers exceeds MAX_NUM_APP_LAYERS, fall back to GPU
1423 //and do not cache the information for the next draw cycle.
1424 if(numLayers > MAX_NUM_APP_LAYERS) {
1425 ALOGI("%s: Number of App layers exceeded the limit ",
1426 __FUNCTION__);
1427 mCachedFrame.reset();
1428 return -1;
1429 }
1430
Saurabh Shahb39f8152013-08-22 10:21:44 -07001431 //reset old data
1432 mCurrentFrame.reset(numLayers);
Jeykumar Sankaran6a9bb9e2013-08-01 14:19:26 -07001433 memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
1434 mCurrentFrame.dropCount = 0;
Prabhanjan Kandula088bd892013-07-02 23:47:13 +05301435
Ramkumar Radhakrishnana70981a2013-08-28 11:33:53 -07001436 // Detect the start of animation and fall back to GPU only once to cache
1437 // all the layers in FB and display FB content until animation completes.
1438 if(ctx->listStats[mDpy].isDisplayAnimating) {
1439 mCurrentFrame.needsRedraw = false;
1440 if(ctx->mAnimationState[mDpy] == ANIMATION_STOPPED) {
1441 mCurrentFrame.needsRedraw = true;
1442 ctx->mAnimationState[mDpy] = ANIMATION_STARTED;
1443 }
1444 setMDPCompLayerFlags(ctx, list);
1445 mCachedFrame.updateCounts(mCurrentFrame);
1446 ret = -1;
1447 return ret;
1448 } else {
1449 ctx->mAnimationState[mDpy] = ANIMATION_STOPPED;
1450 }
1451
Saurabh Shahb39f8152013-08-22 10:21:44 -07001452 //Hard conditions, if not met, cannot do MDP comp
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001453 if(isFrameDoable(ctx)) {
1454 generateROI(ctx, list);
Saurabh Shahb39f8152013-08-22 10:21:44 -07001455
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001456 //Convert from kbps to gbps
1457 sMaxBw = mdpVersion.getHighBw() / 1000000.0;
1458 if (ctx->mExtDisplay->isConnected() ||
1459 ctx->mMDP.panel != MIPI_CMD_PANEL) {
1460 sMaxBw = mdpVersion.getLowBw() / 1000000.0;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001461 }
1462
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001463 if(tryFullFrame(ctx, list) || tryVideoOnly(ctx, list)) {
1464 setMDPCompLayerFlags(ctx, list);
1465 } else {
1466 reset(ctx);
1467 memset(&mCurrentFrame.drop, 0, sizeof(mCurrentFrame.drop));
1468 mCurrentFrame.dropCount = 0;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001469 ret = -1;
Saurabh Shahb39f8152013-08-22 10:21:44 -07001470 }
1471 } else {
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001472 ALOGD_IF( isDebug(),"%s: MDP Comp not possible for this frame",
1473 __FUNCTION__);
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001474 ret = -1;
Saurabh Shahb39f8152013-08-22 10:21:44 -07001475 }
Saurabh Shahb39f8152013-08-22 10:21:44 -07001476
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001477 if(isDebug()) {
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001478 ALOGD("GEOMETRY change: %d",
1479 (list->flags & HWC_GEOMETRY_CHANGED));
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001480 android::String8 sDump("");
1481 dump(sDump);
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001482 ALOGD("%s",sDump.string());
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001483 }
1484
Saurabh Shahdf4741d2013-12-12 16:40:28 -08001485 mCachedFrame.cacheAll(list);
1486 mCachedFrame.updateCounts(mCurrentFrame);
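    //Accumulate this display's GBps claim so that bandwidthCheck() for
    //displays prepared later in the same cycle sees the reduced budget.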
Saurabh Shahf5f2b132013-11-25 12:08:35 -08001487 double panelRefRate =
1488 1000000000.0 / ctx->dpyAttr[mDpy].vsync_period;
1489 sBwClaimed += calcMDPBytesRead(ctx, list) * panelRefRate;
Saurabh Shah8c5c8522013-08-29 17:32:49 -07001490 return ret;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001491}
1492
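//Reserve two VG pipes (left and right halves) so a 4kx2k YUV layer can be
//split across them instead of being decimated on a single pipe.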
radhakrishnac9a67412013-09-25 17:40:42 +05301493bool MDPComp::allocSplitVGPipesfor4k2k(hwc_context_t *ctx,
1494 hwc_display_contents_1_t* list, int index) {
1495
1496 bool bRet = true;
1497 hwc_layer_1_t* layer = &list->hwLayers[index];
1498 private_handle_t *hnd = (private_handle_t *)layer->handle;
1499 int mdpIndex = mCurrentFrame.layerToMDP[index];
1500 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
1501 info.pipeInfo = new MdpYUVPipeInfo;
1502 info.rot = NULL;
1503 MdpYUVPipeInfo& pipe_info = *(MdpYUVPipeInfo*)info.pipeInfo;
1504 ePipeType type = MDPCOMP_OV_VG;
1505
1506 pipe_info.lIndex = ovutils::OV_INVALID;
1507 pipe_info.rIndex = ovutils::OV_INVALID;
1508
1509 pipe_info.lIndex = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
1510 if(pipe_info.lIndex == ovutils::OV_INVALID){
1511 bRet = false;
1512 ALOGD_IF(isDebug(),"%s: allocating first VG pipe failed",
1513 __FUNCTION__);
1514 }
1515 pipe_info.rIndex = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
1516 if(pipe_info.rIndex == ovutils::OV_INVALID){
1517 bRet = false;
1518 ALOGD_IF(isDebug(),"%s: allocating second VG pipe failed",
1519 __FUNCTION__);
1520 }
1521 return bRet;
1522}
Saurabh Shah88e4d272013-09-03 13:31:29 -07001523//=============MDPCompNonSplit===================================================
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001524
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001525void MDPCompNonSplit::adjustForSourceSplit(hwc_context_t *ctx,
radhakrishnac9a67412013-09-25 17:40:42 +05301526 hwc_display_contents_1_t* list){
1527 //As we split a 4kx2k yuv layer and program it to 2 VG pipes
1528 //(if available), increase mdpCount accordingly
1529 mCurrentFrame.mdpCount += ctx->listStats[mDpy].yuv4k2kCount;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001530
1531 //If 4k2k Yuv layer split is possible, and if
1532 //fbz is above 4k2k layer, increment fb zorder by 1
1533 //as we split 4k2k layer and increment zorder for right half
1534 //of the layer
1535 if(mCurrentFrame.fbZ >= 0) {
1536 int n4k2kYuvCount = ctx->listStats[mDpy].yuv4k2kCount;
1537 for(int index = 0; index < n4k2kYuvCount; index++){
1538 int n4k2kYuvIndex =
1539 ctx->listStats[mDpy].yuv4k2kIndices[index];
1540 if(mCurrentFrame.fbZ > n4k2kYuvIndex){
1541 mCurrentFrame.fbZ += 1;
1542 }
1543 }
1544 }
radhakrishnac9a67412013-09-25 17:40:42 +05301545}
1546
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001547/*
1548 * Configures pipe(s) for MDP composition
1549 */
Saurabh Shah88e4d272013-09-03 13:31:29 -07001550int MDPCompNonSplit::configure(hwc_context_t *ctx, hwc_layer_1_t *layer,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001551 PipeLayerPair& PipeLayerPair) {
Saurabh Shah88e4d272013-09-03 13:31:29 -07001552 MdpPipeInfoNonSplit& mdp_info =
1553 *(static_cast<MdpPipeInfoNonSplit*>(PipeLayerPair.pipeInfo));
Saurabh Shahacf10202013-02-26 10:15:15 -08001554 eMdpFlags mdpFlags = OV_MDP_BACKEND_COMPOSITION;
1555 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1556 eIsFg isFg = IS_FG_OFF;
1557 eDest dest = mdp_info.index;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001558
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001559 ALOGD_IF(isDebug(),"%s: configuring: layer: %p z_order: %d dest_pipe: %d",
1560 __FUNCTION__, layer, zOrder, dest);
1561
Saurabh Shah88e4d272013-09-03 13:31:29 -07001562 return configureNonSplit(ctx, layer, mDpy, mdpFlags, zOrder, isFg, dest,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001563 &PipeLayerPair.rot);
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001564}
1565
Saurabh Shah88e4d272013-09-03 13:31:29 -07001566bool MDPCompNonSplit::arePipesAvailable(hwc_context_t *ctx,
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001567 hwc_display_contents_1_t* list) {
1568 overlay::Overlay& ov = *ctx->mOverlay;
1569 int numPipesNeeded = mCurrentFrame.mdpCount;
1570 int availPipes = ov.availablePipes(mDpy, Overlay::MIXER_DEFAULT);
1571
1572 //Reserve pipe for FB
1573 if(mCurrentFrame.fbCount)
1574 availPipes -= 1;
1575
1576 if(numPipesNeeded > availPipes) {
1577 ALOGD_IF(isDebug(), "%s: Insufficient pipes, dpy %d needed %d, avail %d",
1578 __FUNCTION__, mDpy, numPipesNeeded, availPipes);
1579 return false;
1580 }
1581
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001582 if(not areVGPipesAvailable(ctx, list)) {
1583 return false;
1584 }
1585
1586 return true;
1587}
1588
1589bool MDPCompNonSplit::areVGPipesAvailable(hwc_context_t *ctx,
1590 hwc_display_contents_1_t* list) {
1591 overlay::Overlay& ov = *ctx->mOverlay;
1592 int pipesNeeded = 0;
1593 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1594 if(!mCurrentFrame.isFBComposed[i]) {
1595 hwc_layer_1_t* layer = &list->hwLayers[i];
1596 hwc_rect_t dst = layer->displayFrame;
1597 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301598 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1599 pipesNeeded = pipesNeeded + 2;
1600 }
1601 else if(isYuvBuffer(hnd)) {
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001602 pipesNeeded++;
1603 }
1604 }
1605 }
1606
1607 int availableVGPipes = ov.availablePipes(mDpy, ovutils::OV_MDP_PIPE_VG);
1608 if(pipesNeeded > availableVGPipes) {
1609 ALOGD_IF(isDebug(), "%s: Insufficient VG pipes for video layers "
1610 "dpy %d needed %d, avail %d",
1611 __FUNCTION__, mDpy, pipesNeeded, availableVGPipes);
1612 return false;
1613 }
1614
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001615 return true;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001616}
1617
Saurabh Shah88e4d272013-09-03 13:31:29 -07001618bool MDPCompNonSplit::allocLayerPipes(hwc_context_t *ctx,
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001619 hwc_display_contents_1_t* list) {
1620 for(int index = 0; index < mCurrentFrame.layerCount; index++) {
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001621
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001622 if(mCurrentFrame.isFBComposed[index]) continue;
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001623
Jeykumar Sankarancf537002013-01-21 21:19:15 -08001624 hwc_layer_1_t* layer = &list->hwLayers[index];
1625 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301626 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1627 if(allocSplitVGPipesfor4k2k(ctx, list, index)){
1628 continue;
1629 }
1630 }
1631
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001632 int mdpIndex = mCurrentFrame.layerToMDP[index];
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001633 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
Saurabh Shah88e4d272013-09-03 13:31:29 -07001634 info.pipeInfo = new MdpPipeInfoNonSplit;
Saurabh Shahacf10202013-02-26 10:15:15 -08001635 info.rot = NULL;
Saurabh Shah88e4d272013-09-03 13:31:29 -07001636 MdpPipeInfoNonSplit& pipe_info = *(MdpPipeInfoNonSplit*)info.pipeInfo;
Jeykumar Sankarana37fdbf2013-03-06 18:59:28 -08001637 ePipeType type = MDPCOMP_OV_ANY;
1638
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001639 if(isYuvBuffer(hnd)) {
1640 type = MDPCOMP_OV_VG;
Prabhanjan Kandula47191dc2014-01-22 23:01:45 +05301641 } else if(qdutils::MDPVersion::getInstance().is8x26() &&
1642 (ctx->dpyAttr[HWC_DISPLAY_PRIMARY].xres > 1024)) {
1643 if(qhwc::needsScaling(layer))
1644 type = MDPCOMP_OV_RGB;
Prabhanjan Kandula21918db2013-11-26 15:51:58 +05301645 } else if(!qhwc::needsScaling(layer)
Saurabh Shah85234ec2013-04-12 17:09:00 -07001646 && Overlay::getDMAMode() != Overlay::DMA_BLOCK_MODE
1647 && ctx->mMDP.version >= qdutils::MDSS_V5) {
Jeykumar Sankarana37fdbf2013-03-06 18:59:28 -08001648 type = MDPCOMP_OV_DMA;
1649 }
1650
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001651 pipe_info.index = getMdpPipe(ctx, type, Overlay::MIXER_DEFAULT);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001652 if(pipe_info.index == ovutils::OV_INVALID) {
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001653 ALOGD_IF(isDebug(), "%s: Unable to get pipe type = %d",
1654 __FUNCTION__, (int) type);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001655 return false;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001656 }
1657 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001658 return true;
1659}
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001660
radhakrishnac9a67412013-09-25 17:40:42 +05301661int MDPCompNonSplit::configure4k2kYuv(hwc_context_t *ctx, hwc_layer_1_t *layer,
1662 PipeLayerPair& PipeLayerPair) {
1663 MdpYUVPipeInfo& mdp_info =
1664 *(static_cast<MdpYUVPipeInfo*>(PipeLayerPair.pipeInfo));
1665 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1666 eIsFg isFg = IS_FG_OFF;
1667 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1668 eDest lDest = mdp_info.lIndex;
1669 eDest rDest = mdp_info.rIndex;
1670
1671 return configureSourceSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg,
1672 lDest, rDest, &PipeLayerPair.rot);
1673}
1674
Saurabh Shah88e4d272013-09-03 13:31:29 -07001675bool MDPCompNonSplit::draw(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001676
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001677 if(!isEnabled()) {
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001678 ALOGD_IF(isDebug(),"%s: MDP Comp not configured", __FUNCTION__);
1679 return true;
Saurabh Shahcbf7ccc2012-12-19 16:45:51 -08001680 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001681
1682 if(!ctx || !list) {
1683 ALOGE("%s: invalid contxt or list",__FUNCTION__);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001684 return false;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001685 }
1686
Prabhanjan Kandula08222fc2013-07-10 17:20:59 +05301687 if(ctx->listStats[mDpy].numAppLayers > MAX_NUM_APP_LAYERS) {
1688 ALOGD_IF(isDebug(),"%s: Exceeding max layer count", __FUNCTION__);
1689 return true;
1690 }
1691
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001692 /* reset Invalidator */
Saurabh Shah2d998a92013-05-14 17:55:58 -07001693 if(idleInvalidator && !sIdleFallBack && mCurrentFrame.mdpCount)
Saurabh Shahb2117fe2014-01-23 18:39:01 -08001694 idleInvalidator->handleUpdateEvent();
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001695
1696 overlay::Overlay& ov = *ctx->mOverlay;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001697 LayerProp *layerProp = ctx->layerProp[mDpy];
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001698
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001699 int numHwLayers = ctx->listStats[mDpy].numAppLayers;
1700 for(int i = 0; i < numHwLayers && mCurrentFrame.mdpCount; i++ )
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001701 {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001702 if(mCurrentFrame.isFBComposed[i]) continue;
1703
Naseer Ahmed5b6708a2012-08-02 13:46:08 -07001704 hwc_layer_1_t *layer = &list->hwLayers[i];
Saurabh Shahacf10202013-02-26 10:15:15 -08001705 private_handle_t *hnd = (private_handle_t *)layer->handle;
1706 if(!hnd) {
Sushil Chauhan897a9c32013-07-18 11:09:55 -07001707 if (!(layer->flags & HWC_COLOR_FILL)) {
1708 ALOGE("%s handle null", __FUNCTION__);
1709 return false;
1710 }
1711 // No PLAY for Color layer
1712 layerProp[i].mFlags &= ~HWC_MDPCOMP;
1713 continue;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001714 }
1715
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001716 int mdpIndex = mCurrentFrame.layerToMDP[i];
1717
radhakrishnac9a67412013-09-25 17:40:42 +05301718 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit)
1719 {
1720 MdpYUVPipeInfo& pipe_info =
1721 *(MdpYUVPipeInfo*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
1722 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
1723 ovutils::eDest indexL = pipe_info.lIndex;
1724 ovutils::eDest indexR = pipe_info.rIndex;
1725 int fd = hnd->fd;
1726 uint32_t offset = hnd->offset;
1727 if(rot) {
1728 rot->queueBuffer(fd, offset);
1729 fd = rot->getDstMemId();
1730 offset = rot->getDstOffset();
1731 }
1732 if(indexL != ovutils::OV_INVALID) {
1733 ovutils::eDest destL = (ovutils::eDest)indexL;
1734 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1735 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
1736 if (!ov.queueBuffer(fd, offset, destL)) {
1737 ALOGE("%s: queueBuffer failed for display:%d",
1738 __FUNCTION__, mDpy);
1739 return false;
1740 }
1741 }
1742
1743 if(indexR != ovutils::OV_INVALID) {
1744 ovutils::eDest destR = (ovutils::eDest)indexR;
1745 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1746 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
1747 if (!ov.queueBuffer(fd, offset, destR)) {
1748 ALOGE("%s: queueBuffer failed for display:%d",
1749 __FUNCTION__, mDpy);
1750 return false;
1751 }
1752 }
1753 }
1754 else{
1755 MdpPipeInfoNonSplit& pipe_info =
Saurabh Shah88e4d272013-09-03 13:31:29 -07001756 *(MdpPipeInfoNonSplit*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
radhakrishnac9a67412013-09-25 17:40:42 +05301757 ovutils::eDest dest = pipe_info.index;
1758 if(dest == ovutils::OV_INVALID) {
1759 ALOGE("%s: Invalid pipe index (%d)", __FUNCTION__, dest);
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001760 return false;
radhakrishnac9a67412013-09-25 17:40:42 +05301761 }
Saurabh Shahacf10202013-02-26 10:15:15 -08001762
radhakrishnac9a67412013-09-25 17:40:42 +05301763 if(!(layerProp[i].mFlags & HWC_MDPCOMP)) {
1764 continue;
1765 }
1766
1767 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
1768 using pipe: %d", __FUNCTION__, layer,
1769 hnd, dest );
1770
1771 int fd = hnd->fd;
1772 uint32_t offset = hnd->offset;
1773
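            //If a rotator was assigned, route the buffer through it and scan
            //out the rotator's output instead of the original handle.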
1774 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
1775 if(rot) {
1776 if(!rot->queueBuffer(fd, offset))
1777 return false;
1778 fd = rot->getDstMemId();
1779 offset = rot->getDstOffset();
1780 }
1781
1782 if (!ov.queueBuffer(fd, offset, dest)) {
1783 ALOGE("%s: queueBuffer failed for display:%d ",
1784 __FUNCTION__, mDpy);
1785 return false;
1786 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001787 }
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001788
1789 layerProp[i].mFlags &= ~HWC_MDPCOMP;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001790 }
Naseer Ahmed54821fe2012-11-28 18:44:38 -05001791 return true;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07001792}
1793
Saurabh Shah88e4d272013-09-03 13:31:29 -07001794//=============MDPCompSplit===================================================
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001795
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001796void MDPCompSplit::adjustForSourceSplit(hwc_context_t *ctx,
radhakrishnac9a67412013-09-25 17:40:42 +05301797 hwc_display_contents_1_t* list){
1798 //if a 4kx2k yuv layer lies entirely in either the left half
1799 //or the right half, try splitting the yuv layer to avoid decimation
1800 int n4k2kYuvCount = ctx->listStats[mDpy].yuv4k2kCount;
1801 const int lSplit = getLeftSplit(ctx, mDpy);
1802 for(int index = 0; index < n4k2kYuvCount; index++){
1803 int n4k2kYuvIndex = ctx->listStats[mDpy].yuv4k2kIndices[index];
1804 hwc_layer_1_t* layer = &list->hwLayers[n4k2kYuvIndex];
1805 hwc_rect_t dst = layer->displayFrame;
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001806 if((dst.left > lSplit) || (dst.right < lSplit)) {
radhakrishnac9a67412013-09-25 17:40:42 +05301807 mCurrentFrame.mdpCount += 1;
1808 }
Saurabh Shah3d4b8042013-12-10 15:19:17 -08001809 if(mCurrentFrame.fbZ > n4k2kYuvIndex){
1810 mCurrentFrame.fbZ += 1;
1811 }
radhakrishnac9a67412013-09-25 17:40:42 +05301812 }
1813}
1814
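//Count one pipe for every MDP layer whose destination rect touches the given
//mixer's half of the split panel.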
Saurabh Shah88e4d272013-09-03 13:31:29 -07001815int MDPCompSplit::pipesNeeded(hwc_context_t *ctx,
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001816 hwc_display_contents_1_t* list,
1817 int mixer) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001818 int pipesNeeded = 0;
Saurabh Shah67a38c32013-06-10 16:23:15 -07001819 const int xres = ctx->dpyAttr[mDpy].xres;
Saurabh Shah07a8ca82013-08-06 18:45:42 -07001820
1821 const int lSplit = getLeftSplit(ctx, mDpy);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001822
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001823 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1824 if(!mCurrentFrame.isFBComposed[i]) {
1825 hwc_layer_1_t* layer = &list->hwLayers[i];
1826 hwc_rect_t dst = layer->displayFrame;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001827 if(mixer == Overlay::MIXER_LEFT && dst.left < lSplit) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001828 pipesNeeded++;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001829 } else if(mixer == Overlay::MIXER_RIGHT && dst.right > lSplit) {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001830 pipesNeeded++;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001831 }
1832 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001833 }
1834 return pipesNeeded;
1835}
1836
Saurabh Shah88e4d272013-09-03 13:31:29 -07001837bool MDPCompSplit::arePipesAvailable(hwc_context_t *ctx,
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001838 hwc_display_contents_1_t* list) {
1839 overlay::Overlay& ov = *ctx->mOverlay;
Saurabh Shah082468e2013-09-12 10:05:32 -07001840 int totalPipesNeeded = 0;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001841
1842 for(int i = 0; i < Overlay::MIXER_MAX; i++) {
1843 int numPipesNeeded = pipesNeeded(ctx, list, i);
1844 int availPipes = ov.availablePipes(mDpy, i);
1845
1846 //Reserve pipe(s) for FB
1847 if(mCurrentFrame.fbCount)
Saurabh Shah082468e2013-09-12 10:05:32 -07001848 numPipesNeeded += 1;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001849
Saurabh Shah082468e2013-09-12 10:05:32 -07001850 totalPipesNeeded += numPipesNeeded;
1851
1852 //Per mixer check.
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001853 if(numPipesNeeded > availPipes) {
1854 ALOGD_IF(isDebug(), "%s: Insufficient pipes for "
1855 "dpy %d mixer %d needed %d, avail %d",
1856 __FUNCTION__, mDpy, i, numPipesNeeded, availPipes);
1857 return false;
1858 }
1859 }
Saurabh Shah082468e2013-09-12 10:05:32 -07001860
1861 //Per display check, since unused pipes can get counted twice.
1862 int totalPipesAvailable = ov.availablePipes(mDpy);
1863 if(totalPipesNeeded > totalPipesAvailable) {
1864 ALOGD_IF(isDebug(), "%s: Insufficient pipes for "
1865 "dpy %d needed %d, avail %d",
1866 __FUNCTION__, mDpy, totalPipesNeeded, totalPipesAvailable);
1867 return false;
1868 }
1869
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001870 if(not areVGPipesAvailable(ctx, list)) {
1871 return false;
1872 }
1873
1874 return true;
1875}
1876
1877bool MDPCompSplit::areVGPipesAvailable(hwc_context_t *ctx,
1878 hwc_display_contents_1_t* list) {
1879 overlay::Overlay& ov = *ctx->mOverlay;
1880 int pipesNeeded = 0;
1881 const int lSplit = getLeftSplit(ctx, mDpy);
1882 for(int i = 0; i < mCurrentFrame.layerCount; ++i) {
1883 if(!mCurrentFrame.isFBComposed[i]) {
1884 hwc_layer_1_t* layer = &list->hwLayers[i];
1885 hwc_rect_t dst = layer->displayFrame;
1886 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301887 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1888 if((dst.left > lSplit)||(dst.right < lSplit)){
1889 pipesNeeded = pipesNeeded + 2;
1890 continue;
1891 }
1892 }
Saurabh Shah90b7b9b2013-09-12 16:36:08 -07001893 if(isYuvBuffer(hnd)) {
1894 if(dst.left < lSplit) {
1895 pipesNeeded++;
1896 }
1897 if(dst.right > lSplit) {
1898 pipesNeeded++;
1899 }
1900 }
1901 }
1902 }
1903
1904 int availableVGPipes = ov.availablePipes(mDpy, ovutils::OV_MDP_PIPE_VG);
1905 if(pipesNeeded > availableVGPipes) {
1906 ALOGD_IF(isDebug(), "%s: Insufficient VG pipes for video layers "
1907 "dpy %d needed %d, avail %d",
1908 __FUNCTION__, mDpy, pipesNeeded, availableVGPipes);
1909 return false;
1910 }
1911
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001912 return true;
1913}
1914
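//A layer that crosses the panel split needs a pipe on each mixer; a layer
//wholly on one side needs a pipe only on that mixer.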
Saurabh Shah88e4d272013-09-03 13:31:29 -07001915bool MDPCompSplit::acquireMDPPipes(hwc_context_t *ctx, hwc_layer_1_t* layer,
1916 MdpPipeInfoSplit& pipe_info,
Saurabh Shah67a38c32013-06-10 16:23:15 -07001917 ePipeType type) {
1918 const int xres = ctx->dpyAttr[mDpy].xres;
Saurabh Shah07a8ca82013-08-06 18:45:42 -07001919 const int lSplit = getLeftSplit(ctx, mDpy);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001920
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001921 hwc_rect_t dst = layer->displayFrame;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001922 pipe_info.lIndex = ovutils::OV_INVALID;
1923 pipe_info.rIndex = ovutils::OV_INVALID;
1924
1925 if (dst.left < lSplit) {
1926 pipe_info.lIndex = getMdpPipe(ctx, type, Overlay::MIXER_LEFT);
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001927 if(pipe_info.lIndex == ovutils::OV_INVALID)
1928 return false;
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001929 }
1930
1931 if(dst.right > lSplit) {
1932 pipe_info.rIndex = getMdpPipe(ctx, type, Overlay::MIXER_RIGHT);
1933 if(pipe_info.rIndex == ovutils::OV_INVALID)
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001934 return false;
1935 }
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001936
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08001937 return true;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001938}
1939
Saurabh Shah88e4d272013-09-03 13:31:29 -07001940bool MDPCompSplit::allocLayerPipes(hwc_context_t *ctx,
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001941 hwc_display_contents_1_t* list) {
1942 for(int index = 0 ; index < mCurrentFrame.layerCount; index++) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001943
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001944 if(mCurrentFrame.isFBComposed[index]) continue;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001945
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001946 hwc_layer_1_t* layer = &list->hwLayers[index];
1947 private_handle_t *hnd = (private_handle_t *)layer->handle;
radhakrishnac9a67412013-09-25 17:40:42 +05301948 hwc_rect_t dst = layer->displayFrame;
1949 const int lSplit = getLeftSplit(ctx, mDpy);
1950 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit){
1951 if((dst.left > lSplit)||(dst.right < lSplit)){
1952 if(allocSplitVGPipesfor4k2k(ctx, list, index)){
1953 continue;
1954 }
1955 }
1956 }
Saurabh Shah0d65dbe2013-06-06 18:33:16 -07001957 int mdpIndex = mCurrentFrame.layerToMDP[index];
1958 PipeLayerPair& info = mCurrentFrame.mdpToLayer[mdpIndex];
Saurabh Shah88e4d272013-09-03 13:31:29 -07001959 info.pipeInfo = new MdpPipeInfoSplit;
Saurabh Shah9e3adb22013-03-26 11:16:27 -07001960 info.rot = NULL;
Saurabh Shah88e4d272013-09-03 13:31:29 -07001961 MdpPipeInfoSplit& pipe_info = *(MdpPipeInfoSplit*)info.pipeInfo;
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001962 ePipeType type = MDPCOMP_OV_ANY;
1963
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001964 if(isYuvBuffer(hnd)) {
1965 type = MDPCOMP_OV_VG;
Sushil Chauhan15a2ea62013-09-04 18:28:36 -07001966 } else if(!qhwc::needsScalingWithSplit(ctx, layer, mDpy)
Saurabh Shah85234ec2013-04-12 17:09:00 -07001967 && Overlay::getDMAMode() != Overlay::DMA_BLOCK_MODE
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001968 && ctx->mMDP.version >= qdutils::MDSS_V5) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001969 type = MDPCOMP_OV_DMA;
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001970 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001971
1972 if(!acquireMDPPipes(ctx, layer, pipe_info, type)) {
Saurabh Shahe51f8ca2013-05-06 17:26:16 -07001973 ALOGD_IF(isDebug(), "%s: Unable to get pipe for type = %d",
1974 __FUNCTION__, (int) type);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001975 return false;
1976 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08001977 }
1978 return true;
1979}
Saurabh Shahaf5f5972013-07-30 13:56:35 -07001980
radhakrishnac9a67412013-09-25 17:40:42 +05301981int MDPCompSplit::configure4k2kYuv(hwc_context_t *ctx, hwc_layer_1_t *layer,
1982 PipeLayerPair& PipeLayerPair) {
1983 const int lSplit = getLeftSplit(ctx, mDpy);
1984 hwc_rect_t dst = layer->displayFrame;
1985 if((dst.left > lSplit)||(dst.right < lSplit)){
1986 MdpYUVPipeInfo& mdp_info =
1987 *(static_cast<MdpYUVPipeInfo*>(PipeLayerPair.pipeInfo));
1988 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
1989 eIsFg isFg = IS_FG_OFF;
1990 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
1991 eDest lDest = mdp_info.lIndex;
1992 eDest rDest = mdp_info.rIndex;
1993
1994 return configureSourceSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg,
1995 lDest, rDest, &PipeLayerPair.rot);
1996 }
1997 else{
1998 return configure(ctx, layer, PipeLayerPair);
1999 }
2000}
2001
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002002/*
2003 * Configures pipe(s) for MDP composition
2004 */
Saurabh Shah88e4d272013-09-03 13:31:29 -07002005int MDPCompSplit::configure(hwc_context_t *ctx, hwc_layer_1_t *layer,
Saurabh Shah67a38c32013-06-10 16:23:15 -07002006 PipeLayerPair& PipeLayerPair) {
Saurabh Shah88e4d272013-09-03 13:31:29 -07002007 MdpPipeInfoSplit& mdp_info =
2008 *(static_cast<MdpPipeInfoSplit*>(PipeLayerPair.pipeInfo));
Saurabh Shahacf10202013-02-26 10:15:15 -08002009 eZorder zOrder = static_cast<eZorder>(mdp_info.zOrder);
2010 eIsFg isFg = IS_FG_OFF;
2011 eMdpFlags mdpFlagsL = OV_MDP_BACKEND_COMPOSITION;
2012 eDest lDest = mdp_info.lIndex;
2013 eDest rDest = mdp_info.rIndex;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002014
2015 ALOGD_IF(isDebug(),"%s: configuring: layer: %p z_order: %d dest_pipeL: %d "
2016 "dest_pipeR: %d",__FUNCTION__, layer, zOrder, lDest, rDest);
2017
Saurabh Shah88e4d272013-09-03 13:31:29 -07002018 return configureSplit(ctx, layer, mDpy, mdpFlagsL, zOrder, isFg, lDest,
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002019 rDest, &PipeLayerPair.rot);
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002020}
2021
Saurabh Shah88e4d272013-09-03 13:31:29 -07002022bool MDPCompSplit::draw(hwc_context_t *ctx, hwc_display_contents_1_t* list) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002023
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002024 if(!isEnabled()) {
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002025 ALOGD_IF(isDebug(),"%s: MDP Comp not configured", __FUNCTION__);
2026 return true;
2027 }
2028
2029 if(!ctx || !list) {
2030 ALOGE("%s: invalid contxt or list",__FUNCTION__);
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002031 return false;
2032 }
2033
Prabhanjan Kandula08222fc2013-07-10 17:20:59 +05302034 if(ctx->listStats[mDpy].numAppLayers > MAX_NUM_APP_LAYERS) {
2035 ALOGD_IF(isDebug(),"%s: Exceeding max layer count", __FUNCTION__);
2036 return true;
2037 }
2038
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002039 /* reset Invalidator */
Saurabh Shah2d998a92013-05-14 17:55:58 -07002040 if(idleInvalidator && !sIdleFallBack && mCurrentFrame.mdpCount)
Saurabh Shahb2117fe2014-01-23 18:39:01 -08002041 idleInvalidator->handleUpdateEvent();
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002042
Naseer Ahmed54821fe2012-11-28 18:44:38 -05002043 overlay::Overlay& ov = *ctx->mOverlay;
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002044 LayerProp *layerProp = ctx->layerProp[mDpy];
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002045
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002046 int numHwLayers = ctx->listStats[mDpy].numAppLayers;
2047 for(int i = 0; i < numHwLayers && mCurrentFrame.mdpCount; i++ )
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002048 {
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002049 if(mCurrentFrame.isFBComposed[i]) continue;
2050
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002051 hwc_layer_1_t *layer = &list->hwLayers[i];
Saurabh Shahacf10202013-02-26 10:15:15 -08002052 private_handle_t *hnd = (private_handle_t *)layer->handle;
2053 if(!hnd) {
2054 ALOGE("%s handle null", __FUNCTION__);
2055 return false;
2056 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002057
2058 if(!(layerProp[i].mFlags & HWC_MDPCOMP)) {
2059 continue;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002060 }
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002061
Jeykumar Sankaran85977e32013-02-25 17:06:08 -08002062 int mdpIndex = mCurrentFrame.layerToMDP[i];
2063
radhakrishnac9a67412013-09-25 17:40:42 +05302064 if(is4kx2kYuvBuffer(hnd) && sEnable4k2kYUVSplit)
2065 {
2066 MdpYUVPipeInfo& pipe_info =
2067 *(MdpYUVPipeInfo*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
2068 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
2069 ovutils::eDest indexL = pipe_info.lIndex;
2070 ovutils::eDest indexR = pipe_info.rIndex;
2071 int fd = hnd->fd;
2072 uint32_t offset = hnd->offset;
2073 if(rot) {
2074 rot->queueBuffer(fd, offset);
2075 fd = rot->getDstMemId();
2076 offset = rot->getDstOffset();
2077 }
2078 if(indexL != ovutils::OV_INVALID) {
2079 ovutils::eDest destL = (ovutils::eDest)indexL;
2080 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
2081 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
2082 if (!ov.queueBuffer(fd, offset, destL)) {
2083 ALOGE("%s: queueBuffer failed for display:%d",
2084 __FUNCTION__, mDpy);
2085 return false;
2086 }
2087 }
Saurabh Shahacf10202013-02-26 10:15:15 -08002088
radhakrishnac9a67412013-09-25 17:40:42 +05302089 if(indexR != ovutils::OV_INVALID) {
2090 ovutils::eDest destR = (ovutils::eDest)indexR;
2091 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
2092 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
2093 if (!ov.queueBuffer(fd, offset, destR)) {
2094 ALOGE("%s: queueBuffer failed for display:%d",
2095 __FUNCTION__, mDpy);
2096 return false;
2097 }
Saurabh Shaha9da08f2013-07-03 13:27:53 -07002098 }
2099 }
radhakrishnac9a67412013-09-25 17:40:42 +05302100 else{
2101 MdpPipeInfoSplit& pipe_info =
2102 *(MdpPipeInfoSplit*)mCurrentFrame.mdpToLayer[mdpIndex].pipeInfo;
2103 Rotator *rot = mCurrentFrame.mdpToLayer[mdpIndex].rot;
Saurabh Shaha9da08f2013-07-03 13:27:53 -07002104
radhakrishnac9a67412013-09-25 17:40:42 +05302105 ovutils::eDest indexL = pipe_info.lIndex;
2106 ovutils::eDest indexR = pipe_info.rIndex;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002107
radhakrishnac9a67412013-09-25 17:40:42 +05302108 int fd = hnd->fd;
2109 int offset = hnd->offset;
2110
2111 if(ctx->mAD->isModeOn()) {
2112 if(ctx->mAD->draw(ctx, fd, offset)) {
2113 fd = ctx->mAD->getDstFd(ctx);
2114 offset = ctx->mAD->getDstOffset(ctx);
2115 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002116 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002117
radhakrishnac9a67412013-09-25 17:40:42 +05302118 if(rot) {
2119 rot->queueBuffer(fd, offset);
2120 fd = rot->getDstMemId();
2121 offset = rot->getDstOffset();
2122 }
2123
2124 //************* play left mixer **********
2125 if(indexL != ovutils::OV_INVALID) {
2126 ovutils::eDest destL = (ovutils::eDest)indexL;
2127 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
2128 using pipe: %d", __FUNCTION__, layer, hnd, indexL );
2129 if (!ov.queueBuffer(fd, offset, destL)) {
2130 ALOGE("%s: queueBuffer failed for left mixer",
2131 __FUNCTION__);
2132 return false;
2133 }
2134 }
2135
2136 //************* play right mixer **********
2137 if(indexR != ovutils::OV_INVALID) {
2138 ovutils::eDest destR = (ovutils::eDest)indexR;
2139 ALOGD_IF(isDebug(),"%s: MDP Comp: Drawing layer: %p hnd: %p \
2140 using pipe: %d", __FUNCTION__, layer, hnd, indexR );
2141 if (!ov.queueBuffer(fd, offset, destR)) {
2142 ALOGE("%s: queueBuffer failed for right mixer",
2143 __FUNCTION__);
2144 return false;
2145 }
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002146 }
2147 }
Saurabh Shahacf10202013-02-26 10:15:15 -08002148
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002149 layerProp[i].mFlags &= ~HWC_MDPCOMP;
2150 }
Saurabh Shahacf10202013-02-26 10:15:15 -08002151
Jeykumar Sankaranb551ce42013-01-10 16:26:48 -08002152 return true;
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002153}
Naseer Ahmed7c958d42012-07-31 18:57:03 -07002154}; //namespace
2155