blob: 47cdc6890bc07fd2d4460e43668a5521d4358591 [file] [log] [blame]
Iliyan Malchev202a77d2012-06-11 14:41:12 -07001/*
2 * Copyright (c) 2011-2012, Code Aurora Forum. All rights reserved.
3
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions are
6 * met:
7 * * Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * * Redistributions in binary form must reproduce the above
10 * copyright notice, this list of conditions and the following
11 * disclaimer in the documentation and/or other materials provided
12 * with the distribution.
13 * * Neither the name of Code Aurora Forum, Inc. nor the names of its
14 * contributors may be used to endorse or promote products derived
15 * from this software without specific prior written permission.
16 *
17 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
18 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
19 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
20 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
21 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
22 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
23 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
24 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
25 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
26 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
27 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 */
29
30#include <cutils/log.h>
31#include <utils/RefBase.h>
32#include <fcntl.h>
33#include "gralloc_priv.h"
34#include "alloc_controller.h"
35#include "memalloc.h"
36#include "ionalloc.h"
37#include "ashmemalloc.h"
38#include "gr.h"
39
40using namespace gralloc;
41using android::sp;
42
// Union of every private heap-selection usage bit. Used to detect whether
// the client explicitly asked for a particular heap (in which case we must
// not silently fall back to another one).
const int GRALLOC_HEAP_MASK = GRALLOC_USAGE_PRIVATE_ADSP_HEAP |
                              GRALLOC_USAGE_PRIVATE_UI_CONTIG_HEAP |
                              GRALLOC_USAGE_PRIVATE_SMI_HEAP |
                              GRALLOC_USAGE_PRIVATE_SYSTEM_HEAP |
                              GRALLOC_USAGE_PRIVATE_IOMMU_HEAP |
                              GRALLOC_USAGE_PRIVATE_MM_HEAP |
                              GRALLOC_USAGE_PRIVATE_WRITEBACK_HEAP |
                              GRALLOC_USAGE_PRIVATE_CAMERA_HEAP;
51
52
53//Common functions
54static bool canFallback(int compositionType, int usage, bool triedSystem)
55{
56 // Fallback to system heap when alloc fails unless
57 // 1. Composition type is MDP
58 // 2. Alloc from system heap was already tried
59 // 3. The heap type is requsted explicitly
60 // 4. The heap type is protected
61 // 5. The buffer is meant for external display only
62
63 if(compositionType == MDP_COMPOSITION)
64 return false;
65 if(triedSystem)
66 return false;
67 if(usage & (GRALLOC_HEAP_MASK | GRALLOC_USAGE_PROTECTED))
68 return false;
69 if(usage & (GRALLOC_HEAP_MASK | GRALLOC_USAGE_EXTERNAL_ONLY))
70 return false;
71 //Return true by default
72 return true;
73}
74
75static bool useUncached(int usage)
76{
77 // System heaps cannot be uncached
78 if(usage & (GRALLOC_USAGE_PRIVATE_SYSTEM_HEAP |
79 GRALLOC_USAGE_PRIVATE_IOMMU_HEAP))
80 return false;
81 if (usage & GRALLOC_USAGE_PRIVATE_UNCACHED)
82 return true;
83 return false;
84}
85
86sp<IAllocController> IAllocController::sController = NULL;
87sp<IAllocController> IAllocController::getInstance(bool useMasterHeap)
88{
89 if(sController == NULL) {
90#ifdef USE_ION
91 sController = new IonController();
92#else
93 if(useMasterHeap)
94 sController = new PmemAshmemController();
95 else
96 sController = new PmemKernelController();
97#endif
98 }
99 return sController;
100}
101
102
103//-------------- IonController-----------------------//
104IonController::IonController()
105{
106 mIonAlloc = new IonAlloc();
107}
108
109int IonController::allocate(alloc_data& data, int usage,
110 int compositionType)
111{
112 int ionFlags = 0;
113 int ret;
114 bool noncontig = false;
115
116 data.uncached = useUncached(usage);
117 if(usage & GRALLOC_USAGE_PRIVATE_UI_CONTIG_HEAP)
118 ionFlags |= ION_HEAP(ION_SF_HEAP_ID);
119
120 if(usage & GRALLOC_USAGE_PRIVATE_SYSTEM_HEAP) {
121 ionFlags |= ION_HEAP(ION_SYSTEM_HEAP_ID);
122 noncontig = true;
123 }
124
125 if(usage & GRALLOC_USAGE_PRIVATE_IOMMU_HEAP)
126 ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);
127
128 if(usage & GRALLOC_USAGE_PRIVATE_MM_HEAP)
129 ionFlags |= ION_HEAP(ION_CP_MM_HEAP_ID);
130
131 if(usage & GRALLOC_USAGE_PRIVATE_WRITEBACK_HEAP)
132 ionFlags |= ION_HEAP(ION_CP_WB_HEAP_ID);
133
134 if(usage & GRALLOC_USAGE_PRIVATE_CAMERA_HEAP)
135 ionFlags |= ION_HEAP(ION_CAMERA_HEAP_ID);
136
137 if(usage & GRALLOC_USAGE_PROTECTED)
138 ionFlags |= ION_SECURE;
139
140 if(usage & GRALLOC_USAGE_PRIVATE_DO_NOT_MAP)
141 data.allocType = private_handle_t::PRIV_FLAGS_NOT_MAPPED;
142
143 // if no flags are set, default to
144 // SF + IOMMU heaps, so that bypass can work
145 // we can fall back to system heap if
146 // we run out.
147 if(!ionFlags)
148 ionFlags = ION_HEAP(ION_SF_HEAP_ID) | ION_HEAP(ION_IOMMU_HEAP_ID);
149
150 data.flags = ionFlags;
151 ret = mIonAlloc->alloc_buffer(data);
152 // Fallback
153 if(ret < 0 && canFallback(compositionType,
154 usage,
155 (ionFlags & ION_SYSTEM_HEAP_ID)))
156 {
157 ALOGW("Falling back to system heap");
158 data.flags = ION_HEAP(ION_SYSTEM_HEAP_ID);
159 noncontig = true;
160 ret = mIonAlloc->alloc_buffer(data);
161 }
162
163 if(ret >= 0 ) {
164 data.allocType = private_handle_t::PRIV_FLAGS_USES_ION;
165 if(noncontig)
166 data.allocType |= private_handle_t::PRIV_FLAGS_NONCONTIGUOUS_MEM;
167 if(ionFlags & ION_SECURE)
168 data.allocType |= private_handle_t::PRIV_FLAGS_SECURE_BUFFER;
169 }
170
171 return ret;
172}
173
174sp<IMemAlloc> IonController::getAllocator(int flags)
175{
176 sp<IMemAlloc> memalloc;
177 if (flags & private_handle_t::PRIV_FLAGS_USES_ION) {
178 memalloc = mIonAlloc;
179 } else {
180 ALOGE("%s: Invalid flags passed: 0x%x", __FUNCTION__, flags);
181 }
182
183 return memalloc;
184}
185
186#if 0
187//-------------- PmemKernelController-----------------------//
188
// NOTE: this whole controller is inside '#if 0' -- dead code kept for
// reference; it is not compiled.
PmemKernelController::PmemKernelController()
{
    // Only the ADSP allocator is kept for the controller's lifetime.
    mPmemAdspAlloc = new PmemKernelAlloc(DEVICE_PMEM_ADSP);
    // XXX: Right now, there is no need to maintain an instance
    // of the SMI allocator as we need it only in a few cases
}
195
// Nothing to release explicitly; mPmemAdspAlloc is an sp<> and cleans up
// itself. (Dead code: inside '#if 0'.)
PmemKernelController::~PmemKernelController()
{
}
199
// Allocate from the kernel pmem heaps: try the SMI pool first when it was
// requested explicitly (or for external-display / protected buffers), then
// fall back to the ADSP heap. (Dead code: inside '#if 0'.)
int PmemKernelController::allocate(alloc_data& data, int usage,
                                   int compositionType)
{
    int ret = 0;
    bool adspFallback = false;
    // If SMI was not explicitly requested, ADSP is an acceptable fallback.
    if (!(usage & GRALLOC_USAGE_PRIVATE_SMI_HEAP))
        adspFallback = true;

    // Try SMI first
    if ((usage & GRALLOC_USAGE_PRIVATE_SMI_HEAP) ||
        (usage & GRALLOC_USAGE_EXTERNAL_DISP) ||
        (usage & GRALLOC_USAGE_PROTECTED))
    {
        // Probe for the SMI pool device before building an allocator on it.
        int tempFd = open(DEVICE_PMEM_SMIPOOL, O_RDWR, 0);
        if(tempFd > 0) {
            close(tempFd);
            sp<IMemAlloc> memalloc;
            memalloc = new PmemKernelAlloc(DEVICE_PMEM_SMIPOOL);
            ret = memalloc->alloc_buffer(data);
            if(ret >= 0)
                return ret;
            else {
                if(adspFallback)
                    ALOGW("Allocation from SMI failed, trying ADSP");
            }
        }
    }

    // ADSP path: explicit request, or fallback after an SMI failure.
    if ((usage & GRALLOC_USAGE_PRIVATE_ADSP_HEAP) || adspFallback) {
        ret = mPmemAdspAlloc->alloc_buffer(data);
    }
    return ret;
}
233
// Map handle flags back to the allocator that produced the buffer; returns
// NULL for flags this controller does not own. (Dead code: inside '#if 0'.)
sp<IMemAlloc> PmemKernelController::getAllocator(int flags)
{
    sp<IMemAlloc> memalloc;
    if (flags & private_handle_t::PRIV_FLAGS_USES_PMEM_ADSP)
        memalloc = mPmemAdspAlloc;
    else {
        ALOGE("%s: Invalid flags passed: 0x%x", __FUNCTION__, flags);
        memalloc = NULL;
    }

    return memalloc;
}
246
247//-------------- PmemAshmmemController-----------------------//
248
// Master-heap controller: owns userspace pmem, ashmem, and a kernel pmem
// controller for ADSP/SMI requests. (Dead code: inside '#if 0'.)
PmemAshmemController::PmemAshmemController()
{
    mPmemUserspaceAlloc = new PmemUserspaceAlloc();
    mAshmemAlloc = new AshmemAlloc();
    mPmemKernelCtrl = new PmemKernelController();
}
255
// Members are sp<> and release themselves. (Dead code: inside '#if 0'.)
PmemAshmemController::~PmemAshmemController()
{
}
259
// Route an allocation to the right backend by usage bits: ADSP/SMI to the
// kernel pmem controller, system heap to ashmem, everything else to
// userspace pmem (EBI) with an ashmem fallback. (Dead code: inside '#if 0'.)
int PmemAshmemController::allocate(alloc_data& data, int usage,
                                   int compositionType)
{
    int ret = 0;

    // Make buffers cacheable by default
    data.uncached = false;

    // Override if we explicitly need uncached buffers
    if (usage & GRALLOC_USAGE_PRIVATE_UNCACHED)
        data.uncached = true;

    // If ADSP or SMI is requested use the kernel controller
    if(usage & (GRALLOC_USAGE_PRIVATE_ADSP_HEAP|
                GRALLOC_USAGE_PRIVATE_SMI_HEAP)) {
        ret = mPmemKernelCtrl->allocate(data, usage, compositionType);
        if(ret < 0)
            ALOGE("%s: Failed to allocate ADSP/SMI memory", __func__);
        else
            data.allocType = private_handle_t::PRIV_FLAGS_USES_PMEM_ADSP;
        return ret;
    }

    // System heap requests go straight to ashmem (always non-contiguous).
    if(usage & GRALLOC_USAGE_PRIVATE_SYSTEM_HEAP) {
        ret = mAshmemAlloc->alloc_buffer(data);
        if(ret >= 0) {
            data.allocType = private_handle_t::PRIV_FLAGS_USES_ASHMEM;
            data.allocType |= private_handle_t::PRIV_FLAGS_NONCONTIGUOUS_MEM;
        }
        return ret;
    }

    // if no memory specific flags are set,
    // default to EBI heap, so that bypass
    // can work. We can fall back to system
    // heap if we run out.
    ret = mPmemUserspaceAlloc->alloc_buffer(data);

    // Fallback
    if(ret >= 0 ) {
        data.allocType = private_handle_t::PRIV_FLAGS_USES_PMEM;
    } else if(ret < 0 && canFallback(compositionType, usage, false)) {
        ALOGW("Falling back to ashmem");
        ret = mAshmemAlloc->alloc_buffer(data);
        if(ret >= 0) {
            data.allocType = private_handle_t::PRIV_FLAGS_USES_ASHMEM;
            data.allocType |= private_handle_t::PRIV_FLAGS_NONCONTIGUOUS_MEM;
        }
    }

    return ret;
}
312
// Map handle flags back to the allocator that produced the buffer; ADSP
// flags are delegated to the kernel controller. Returns NULL for unknown
// flags. (Dead code: inside '#if 0'.)
sp<IMemAlloc> PmemAshmemController::getAllocator(int flags)
{
    sp<IMemAlloc> memalloc;
    if (flags & private_handle_t::PRIV_FLAGS_USES_PMEM)
        memalloc = mPmemUserspaceAlloc;
    else if (flags & private_handle_t::PRIV_FLAGS_USES_PMEM_ADSP)
        memalloc = mPmemKernelCtrl->getAllocator(flags);
    else if (flags & private_handle_t::PRIV_FLAGS_USES_ASHMEM)
        memalloc = mAshmemAlloc;
    else {
        ALOGE("%s: Invalid flags passed: 0x%x", __FUNCTION__, flags);
        memalloc = NULL;
    }

    return memalloc;
}
329#endif
330
331size_t getBufferSizeAndDimensions(int width, int height, int format,
332 int& alignedw, int &alignedh)
333{
334 size_t size;
335
336 alignedw = ALIGN(width, 32);
337 alignedh = ALIGN(height, 32);
338 switch (format) {
339 case HAL_PIXEL_FORMAT_RGBA_8888:
340 case HAL_PIXEL_FORMAT_RGBX_8888:
341 case HAL_PIXEL_FORMAT_BGRA_8888:
342 size = alignedw * alignedh * 4;
343 break;
344 case HAL_PIXEL_FORMAT_RGB_888:
345 size = alignedw * alignedh * 3;
346 break;
347 case HAL_PIXEL_FORMAT_RGB_565:
348 case HAL_PIXEL_FORMAT_RGBA_5551:
349 case HAL_PIXEL_FORMAT_RGBA_4444:
350 size = alignedw * alignedh * 2;
351 break;
352
353 // adreno formats
354 case HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO: // NV21
355 size = ALIGN(alignedw*alignedh, 4096);
356 size += ALIGN(2 * ALIGN(width/2, 32) * ALIGN(height/2, 32), 4096);
357 break;
358 case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED: // NV12
359 // The chroma plane is subsampled,
360 // but the pitch in bytes is unchanged
361 // The GPU needs 4K alignment, but the video decoder needs 8K
362 alignedw = ALIGN(width, 128);
363 size = ALIGN( alignedw * alignedh, 8192);
364 size += ALIGN( alignedw * ALIGN(height/2, 32), 8192);
365 break;
366 case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:
367 case HAL_PIXEL_FORMAT_YCbCr_420_SP:
368 case HAL_PIXEL_FORMAT_YCrCb_420_SP:
369 case HAL_PIXEL_FORMAT_YV12:
370 if ((format == HAL_PIXEL_FORMAT_YV12) && ((width&1) || (height&1))) {
371 ALOGE("w or h is odd for the YV12 format");
372 return -EINVAL;
373 }
374 alignedw = ALIGN(width, 16);
375 alignedh = height;
376 if (HAL_PIXEL_FORMAT_NV12_ENCODEABLE == format) {
377 // The encoder requires a 2K aligned chroma offset.
378 size = ALIGN(alignedw*alignedh, 2048) +
379 (ALIGN(alignedw/2, 16) * (alignedh/2))*2;
380 } else {
381 size = alignedw*alignedh +
382 (ALIGN(alignedw/2, 16) * (alignedh/2))*2;
383 }
384 size = ALIGN(size, 4096);
385 break;
386
387 default:
388 ALOGE("unrecognized pixel format: %d", format);
389 return -EINVAL;
390 }
391
392 return size;
393}
394
395// Allocate buffer from width, height and format into a
396// private_handle_t. It is the responsibility of the caller
397// to free the buffer using the free_buffer function
398int alloc_buffer(private_handle_t **pHnd, int w, int h, int format, int usage)
399{
400 alloc_data data;
401 int alignedw, alignedh;
402 android::sp<gralloc::IAllocController> sAlloc =
403 gralloc::IAllocController::getInstance(false);
404 data.base = 0;
405 data.fd = -1;
406 data.offset = 0;
407 data.size = getBufferSizeAndDimensions(w, h, format, alignedw, alignedh);
408 data.align = getpagesize();
409 data.uncached = useUncached(usage);
410 int allocFlags = usage;
411
412 int err = sAlloc->allocate(data, allocFlags, 0);
413 if (0 != err) {
414 ALOGE("%s: allocate failed", __FUNCTION__);
415 return -ENOMEM;
416 }
417
418 private_handle_t* hnd = new private_handle_t(data.fd, data.size,
419 data.allocType, 0, format, alignedw, alignedh);
420 hnd->base = (int) data.base;
421 hnd->offset = data.offset;
422 hnd->gpuaddr = 0;
423 *pHnd = hnd;
424 return 0;
425}
426
427void free_buffer(private_handle_t *hnd)
428{
429 android::sp<gralloc::IAllocController> sAlloc =
430 gralloc::IAllocController::getInstance(false);
431 if (hnd && hnd->fd > 0) {
432 sp<IMemAlloc> memalloc = sAlloc->getAllocator(hnd->flags);
433 memalloc->free_buffer((void*)hnd->base, hnd->size, hnd->offset, hnd->fd);
434 }
435 if(hnd)
436 delete hnd;
437
438}